var/home/core/zuul-output/logs/kubelet.log
Dec 12 14:03:39 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 12 14:03:39 crc restorecon[4574]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 12 14:03:39 crc restorecon[4574]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:39 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc 
restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 12 14:03:40 crc 
restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 
14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 12 14:03:40 crc 
restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 
14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 12 14:03:40 crc restorecon[4574]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 12 14:03:40 crc kubenswrapper[4663]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.749724 4663 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753446 4663 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753462 4663 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753467 4663 feature_gate.go:330] unrecognized feature gate: Example
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753471 4663 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753476 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753480 4663 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753484 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753488 4663 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753491 4663 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753494 4663 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753497 4663 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753501 4663 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753504 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753507 4663 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753510 4663 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753513 4663 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753516 4663 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753519 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753522 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753525 4663 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753529 4663 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753532 4663 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753535 4663 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753538 4663 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753541 4663 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753544 4663 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753547 4663 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753551 4663 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753554 4663 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753558 4663 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753562 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753565 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753568 4663 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753573 4663 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753576 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753580 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753583 4663 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753587 4663 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753592 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753596 4663 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753605 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753609 4663 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753613 4663 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753616 4663 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753620 4663 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753623 4663 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753626 4663 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753629 4663 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753632 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753635 4663 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753638 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753641 4663 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753644 4663 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753648 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753651 4663 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753654 4663 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753657 4663 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753660 4663 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753663 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753666 4663 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753669 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753673 4663 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753676 4663 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753679 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753682 4663 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753685 4663 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753688 4663 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
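Each of these warnings comes from the shared feature-gate registry: the OpenShift wrapper hands the full cluster gate list to the kubelet, and plain-Kubernetes code does not know the OpenShift-only names, so it warns and moves on rather than failing startup. A minimal Go sketch of that warn-and-ignore pattern, as an illustration of the observed behavior, not the actual k8s.io/component-base/featuregate implementation:

  package main

  import "fmt"

  // known maps the gates this binary was compiled with to their defaults
  // (two names taken from the log; the defaults here are assumptions).
  var known = map[string]bool{
  	"CloudDualStackNodeIPs": true,
  	"KMSv1":                 false,
  }

  // set applies requested gates, warning on names the binary does not know,
  // e.g. OpenShift-only gates handed to a vanilla Kubernetes component.
  func set(requested map[string]bool) map[string]bool {
  	effective := map[string]bool{}
  	for name, def := range known {
  		effective[name] = def
  	}
  	for name, val := range requested {
  		if _, ok := known[name]; !ok {
  			fmt.Printf("W feature_gate.go:330] unrecognized feature gate: %s\n", name)
  			continue // unknown gates are skipped, not fatal
  		}
  		effective[name] = val
  	}
  	return effective
  }

  func main() {
  	fmt.Println(set(map[string]bool{"GatewayAPI": true, "KMSv1": true}))
  }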
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753691 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753695 4663 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753699 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.753703 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754594 4663 flags.go:64] FLAG: --address="0.0.0.0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754605 4663 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754613 4663 flags.go:64] FLAG: --anonymous-auth="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754617 4663 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754622 4663 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754626 4663 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754630 4663 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754634 4663 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754638 4663 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754642 4663 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754646 4663 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754650 4663 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754654 4663 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754657 4663 flags.go:64] FLAG: --cgroup-root=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754661 4663 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754664 4663 flags.go:64] FLAG: --client-ca-file=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754668 4663 flags.go:64] FLAG: --cloud-config=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754671 4663 flags.go:64] FLAG: --cloud-provider=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754674 4663 flags.go:64] FLAG: --cluster-dns="[]"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754679 4663 flags.go:64] FLAG: --cluster-domain=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754682 4663 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754686 4663 flags.go:64] FLAG: --config-dir=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754689 4663 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754693 4663 flags.go:64] FLAG: --container-log-max-files="5"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754697 4663 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754701 4663 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754709 4663 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754713 4663 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754717 4663 flags.go:64] FLAG: --contention-profiling="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754720 4663 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754724 4663 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754729 4663 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754732 4663 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754736 4663 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754756 4663 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754760 4663 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754764 4663 flags.go:64] FLAG: --enable-load-reader="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754768 4663 flags.go:64] FLAG: --enable-server="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754771 4663 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754776 4663 flags.go:64] FLAG: --event-burst="100"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754781 4663 flags.go:64] FLAG: --event-qps="50"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754784 4663 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754788 4663 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754792 4663 flags.go:64] FLAG: --eviction-hard=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754797 4663 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754801 4663 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754805 4663 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754809 4663 flags.go:64] FLAG: --eviction-soft=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754813 4663 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754816 4663 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754820 4663 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754824 4663 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754827 4663 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754831 4663 flags.go:64] FLAG: --fail-swap-on="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754834 4663 flags.go:64] FLAG: --feature-gates=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754838 4663 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754842 4663 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754846 4663 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754849 4663 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754853 4663 flags.go:64] FLAG: --healthz-port="10248"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754857 4663 flags.go:64] FLAG: --help="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754860 4663 flags.go:64] FLAG: --hostname-override=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754864 4663 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754868 4663 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754872 4663 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754876 4663 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754879 4663 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754883 4663 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754886 4663 flags.go:64] FLAG: --image-service-endpoint=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754890 4663 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754894 4663 flags.go:64] FLAG: --kube-api-burst="100"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754898 4663 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754903 4663 flags.go:64] FLAG: --kube-api-qps="50"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754906 4663 flags.go:64] FLAG: --kube-reserved=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754910 4663 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754914 4663 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754917 4663 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754921 4663 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754925 4663 flags.go:64] FLAG: --lock-file=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754928 4663 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754932 4663 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754935 4663 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754945 4663 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754949 4663 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754952 4663 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754956 4663 flags.go:64] FLAG: --logging-format="text"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754960 4663 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754964 4663 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754967 4663 flags.go:64] FLAG: --manifest-url=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754971 4663 flags.go:64] FLAG: --manifest-url-header=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754976 4663 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754980 4663 flags.go:64] FLAG: --max-open-files="1000000"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754984 4663 flags.go:64] FLAG: --max-pods="110"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754988 4663 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754992 4663 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754996 4663 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.754999 4663 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755003 4663 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755007 4663 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755011 4663 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755019 4663 flags.go:64] FLAG: --node-status-max-images="50"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755023 4663 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755027 4663 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755031 4663 flags.go:64] FLAG: --pod-cidr=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755035 4663 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755040 4663 flags.go:64] FLAG: --pod-manifest-path=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755044 4663 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755048 4663 flags.go:64] FLAG: --pods-per-core="0"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755051 4663 flags.go:64] FLAG: --port="10250"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755055 4663 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755059 4663 flags.go:64] FLAG: --provider-id=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755062 4663 flags.go:64] FLAG: --qos-reserved=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755066 4663 flags.go:64] FLAG: --read-only-port="10255"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755070 4663 flags.go:64] FLAG: --register-node="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755074 4663 flags.go:64] FLAG: --register-schedulable="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755077 4663 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755083 4663 flags.go:64] FLAG: --registry-burst="10"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755087 4663 flags.go:64] FLAG: --registry-qps="5"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755091 4663 flags.go:64] FLAG: --reserved-cpus=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755094 4663 flags.go:64] FLAG: --reserved-memory=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755098 4663 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755102 4663 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755106 4663 flags.go:64] FLAG: --rotate-certificates="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755110 4663 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755113 4663 flags.go:64] FLAG: --runonce="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755117 4663 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755120 4663 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755124 4663 flags.go:64] FLAG: --seccomp-default="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755131 4663 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755137 4663 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755141 4663 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755144 4663 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755148 4663 flags.go:64] FLAG: --storage-driver-password="root"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755151 4663 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755155 4663 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755159 4663 flags.go:64] FLAG: --storage-driver-user="root"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755162 4663 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755166 4663 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755169 4663 flags.go:64] FLAG: --system-cgroups=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755173 4663 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755179 4663 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755182 4663 flags.go:64] FLAG: --tls-cert-file=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755186 4663 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755190 4663 flags.go:64] FLAG: --tls-min-version=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755194 4663 flags.go:64] FLAG: --tls-private-key-file=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755197 4663 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755201 4663 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755204 4663 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755208 4663 flags.go:64] FLAG: --v="2"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755213 4663 flags.go:64] FLAG: --version="false"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755218 4663 flags.go:64] FLAG: --vmodule=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755223 4663 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.755227 4663 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755330 4663 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755335 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755339 4663 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755343 4663 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755346 4663 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755350 4663 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755354 4663 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
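The flags.go:64 dump above records every flag's effective value, one entry per flag, which makes it straightforward to recover the full effective command line from a saved log. A small Go sketch of that extraction; the kubelet.log filename is an assumed local copy of this file, and the regex is illustrative:

  package main

  import (
  	"bufio"
  	"fmt"
  	"os"
  	"regexp"
  )

  // flagRe matches the `FLAG: --name="value"` fragments logged at startup.
  var flagRe = regexp.MustCompile(`FLAG: (--[\w-]+)="(.*?)"`)

  func main() {
  	f, err := os.Open("kubelet.log") // assumed local copy of this log
  	if err != nil {
  		panic(err)
  	}
  	defer f.Close()

  	sc := bufio.NewScanner(f)
  	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries can be long
  	for sc.Scan() {
  		for _, m := range flagRe.FindAllStringSubmatch(sc.Text(), -1) {
  			fmt.Printf("%s=%s\n", m[1], m[2])
  		}
  	}
  }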
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755373 4663 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755378 4663 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755381 4663 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755385 4663 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755388 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755392 4663 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755395 4663 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755398 4663 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755401 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755404 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755407 4663 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755410 4663 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755413 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755416 4663 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755419 4663 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755423 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755426 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755429 4663 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755433 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755436 4663 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755439 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755443 4663 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755447 4663 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755451 4663 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755455 4663 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755458 4663 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755462 4663 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755466 4663 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755469 4663 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755472 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755476 4663 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755479 4663 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755483 4663 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755487 4663 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755490 4663 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755493 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755496 4663 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755500 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755504 4663 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755508 4663 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755511 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755514 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755518 4663 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755521 4663 feature_gate.go:330] unrecognized feature gate: Example
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755524 4663 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755528 4663 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755531 4663 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755534 4663 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755537 4663 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755540 4663 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755543 4663 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755547 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755552 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755555 4663 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755559 4663 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755563 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755566 4663 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755570 4663 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755573 4663 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755576 4663 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755579 4663 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755583 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755586 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.755589 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.756152 4663 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.761805 4663 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.761825 4663 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761884 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761903 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761908 4663 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761913 4663 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761916 4663 feature_gate.go:330] unrecognized feature gate: Example
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761921 4663 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
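The feature_gate.go:386 entry above is the useful summary: the effective gate map after all overrides. It is printed once per feature-gate instance the process constructs, which is why the same map (and the same warning run) recurs several times in this startup. A Go sketch that parses that summary line back into a map, e.g. for diffing gates across restarts; the regex is illustrative, not taken from kubelet source:

  package main

  import (
  	"fmt"
  	"regexp"
  	"strconv"
  )

  // pairRe matches Name:true / Name:false pairs inside the logged map literal.
  var pairRe = regexp.MustCompile(`(\w+):(true|false)`)

  func parseGates(line string) map[string]bool {
  	gates := map[string]bool{}
  	for _, m := range pairRe.FindAllStringSubmatch(line, -1) {
  		v, _ := strconv.ParseBool(m[2])
  		gates[m[1]] = v
  	}
  	return gates
  }

  func main() {
  	// Shortened sample of the summary entry above.
  	line := `feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}`
  	fmt.Println(parseGates(line))
  }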
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761926 4663 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761930 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761934 4663 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761938 4663 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761941 4663 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761944 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761947 4663 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761951 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761954 4663 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761957 4663 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761960 4663 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761964 4663 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761968 4663 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761972 4663 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761976 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761981 4663 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761985 4663 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761988 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761992 4663 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761996 4663 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.761999 4663 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762003 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762006 4663 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762009 4663 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762012 4663 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762016 4663 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762019 4663 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762022 4663 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762025 4663 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762029 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762032 4663 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762035 4663 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762038 4663 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762041 4663 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762045 4663 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762048 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762051 4663 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762054 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762057 4663 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762061 4663 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762064 4663 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762067 4663 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762070 4663 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762073 4663 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762077 4663 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762080 4663 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762083 4663 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762086 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762089 4663 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762093 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762097 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762101 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762105 4663 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762108 4663 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762111 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762116 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762119 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762123 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762126 4663 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762131 4663 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762134 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762137 4663 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762141 4663 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762144 4663 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762147 4663 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.762152 4663 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.762794 4663 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763014 4663 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763021 4663 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763026 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763030 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763035 4663 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763041 4663 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763125 4663 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763145 4663 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763150 4663 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763154 4663 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763159 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763163 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763167 4663 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763170 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763174 4663 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763177 4663 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763181 4663 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763186 4663 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763194 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763198 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763202 4663 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763205 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763209 4663 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763213 4663 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763221 4663 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763225 4663 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763231 4663 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763236 4663 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763240 4663 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763244 4663 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763251 4663 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763254 4663 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763258 4663 feature_gate.go:330] unrecognized feature gate: Example
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763262 4663 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763266 4663 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763274 4663 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763278 4663 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763282 4663 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763286 4663 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763290 4663 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763294 4663 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763298 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763301 4663 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763307 4663 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763310 4663 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763314 4663 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763318 4663 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763321 4663 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763325 4663 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763328 4663 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763331 4663 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763335 4663 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763338 4663 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763342 4663 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763345 4663 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763468 4663 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763472 4663 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763492 4663 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763495 4663 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763499 4663 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763502 4663 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763506 4663 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763511 4663 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763514 4663 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763517 4663 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763675 4663 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763686 4663 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763692 4663 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763697 4663 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.763701 4663 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.763709 4663 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.763890 4663 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.766514 4663 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.766588 4663 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
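Client rotation is on and the bootstrap kubeconfig was judged still valid, so the kubelet loads the combined cert/key PEM shown above. A standard-library Go sketch for reading the certificate's NotAfter out of that file, to cross-check the expiration and rotation deadline logged in the next entries; the path is taken from the log line above:

  package main

  import (
  	"crypto/x509"
  	"encoding/pem"
  	"fmt"
  	"os"
  )

  func main() {
  	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
  	if err != nil {
  		panic(err)
  	}
  	// The file holds cert and key concatenated; inspect only CERTIFICATE blocks.
  	for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) {
  		if block.Type != "CERTIFICATE" {
  			continue
  		}
  		cert, err := x509.ParseCertificate(block.Bytes)
  		if err != nil {
  			panic(err)
  		}
  		fmt.Println("expires:", cert.NotAfter)
  	}
  }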
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.767811 4663 server.go:997] "Starting client certificate rotation"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.767836 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.768000 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-13 08:18:24.679446096 +0000 UTC
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.768079 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.780319 4663 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.781320 4663 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.781766 4663 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.789791 4663 log.go:25] "Validated CRI v1 runtime API"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.807044 4663 log.go:25] "Validated CRI v1 image API"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.808116 4663 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.811104 4663 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-12-14-00-15-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.811127 4663 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}]
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.824267 4663 manager.go:217] Machine: {Timestamp:2025-12-12 14:03:40.822512946 +0000 UTC m=+0.247931010 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445406 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:634273e5-e49c-4fb3-a677-4f289806af55 BootID:fd6d9db9-7d59-43ab-8347-890bc49c2a90 Filesystems:[{Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:cb:6b:bb Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:cb:6b:bb Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:7e:4c:3a Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:46:8b:66 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:15:47:4d Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:46:70:74 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:62:bd:09:0f:dd:9b Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:52:57:19:66:9c:08 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.824437 4663 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.824515 4663 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825266 4663 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825435 4663 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825458 4663 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825598 4663 topology_manager.go:138] "Creating topology manager with none policy"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825606 4663 container_manager_linux.go:303] "Creating device plugin manager"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825951 4663 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.825977 4663 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.826066 4663 state_mem.go:36] "Initialized new in-memory state store"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.826136 4663 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.827858 4663 kubelet.go:418] "Attempting to sync node with API server"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.827876 4663 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.827893 4663 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.827903 4663 kubelet.go:324] "Adding apiserver pod source"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.827911 4663 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.830010 4663 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.830448 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.830466 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.830501 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError"
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.830525 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.830707 4663 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.832542 4663 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833797 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833819 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833826 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833836 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833845 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833851 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833859 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833869 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833876 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833882 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833891 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.833897 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.834422 4663 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.834740 4663 server.go:1280] "Started kubelet"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835308 4663 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835495 4663 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835497 4663 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 12 14:03:40 crc systemd[1]: Started Kubernetes Kubelet.
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835822 4663 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835952 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835968 4663 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.835997 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 11:18:06.258892858 +0000 UTC
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.836180 4663 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.836190 4663 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.836220 4663 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.836416 4663 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.836618 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.836668 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError"
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.837721 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="200ms"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.839216 4663 server.go:460] "Adding debug handlers to kubelet server"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.841252 4663 factory.go:153] Registering CRI-O factory
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843297 4663 factory.go:221] Registration of the crio container factory successfully
Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.839267 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.204:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18807cc0a8ad1d33 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-12 14:03:40.834716979 +0000 UTC m=+0.260135064,LastTimestamp:2025-12-12 14:03:40.834716979 +0000 UTC m=+0.260135064,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843440 4663 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843463 4663 factory.go:55] Registering systemd factory
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843474 4663 factory.go:221] Registration of the systemd container factory successfully
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843492 4663 factory.go:103] Registering Raw factory
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.843504 4663 manager.go:1196] Started watching for new ooms in manager
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.844796 4663 manager.go:319] Starting recovery of all containers
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847825 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847871 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847881 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847891 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847900 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847910 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847920 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847929 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847939 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847947 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847956 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847965 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847976 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847988 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.847996 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848016 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848036 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848044 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848054 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848062 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848070 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848077 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848085 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848093 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848102 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848109 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848120 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848130 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848138 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848146 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848154 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848173 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848182 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848203 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848211 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848219 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848226 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848234 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848247 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848254 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848262 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848269 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848278 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848286 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848293 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848302 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848309 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848326 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848334 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848342 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848376 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848384 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848395 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848404 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848414 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848423 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848432 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848439 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848449 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848455 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848462 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848470 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848487 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848494 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848502 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848509 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848517 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848525 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848532 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848540 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848550 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848557 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848566 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848573 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848581 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848588 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848597 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848604 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848612 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848620 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848628 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848636 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848644 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848653 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848660 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848668 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848677 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848684 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848692 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848699 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848706 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848714 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848721 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848729 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848736 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848758 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848768 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848776 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848785 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848793 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848800 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848808 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848817 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848826 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848869 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848882 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848890 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848900 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848909 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848918 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848928 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848937 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848944 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848952 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848960 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848967 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848988 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.848995 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849003 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849011 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849019 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849026 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849034 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849044 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849052 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849060 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849068 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849076 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849083 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849091 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849098 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849106 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849114 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849122 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849131 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849138 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849147 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849154 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849162 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011"
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849170 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849178 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849186 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849193 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849201 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849210 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849218 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849226 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849244 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849252 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849260 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849278 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849286 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849294 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849300 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849308 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849316 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849324 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849331 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849338 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849345 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849353 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849370 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849378 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849386 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849394 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849401 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849410 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849417 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849426 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849434 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849441 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849448 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849456 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849463 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849470 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849478 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849484 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849492 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849499 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849507 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849514 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849522 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849531 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849538 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849545 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849553 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849561 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849569 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849576 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849583 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849591 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849599 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849608 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849616 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849624 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849632 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.849640 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850677 4663 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850696 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850706 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850715 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850723 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850731 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850739 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850763 4663 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850771 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850780 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850789 4663 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850797 4663 reconstruct.go:97] "Volume reconstruction finished" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.850804 4663 reconciler.go:26] "Reconciler: start to sync state" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.864621 4663 manager.go:324] Recovery completed Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.867319 4663 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.868810 4663 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.868843 4663 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.868862 4663 kubelet.go:2335] "Starting kubelet main sync loop" Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.868925 4663 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 12 14:03:40 crc kubenswrapper[4663]: W1212 14:03:40.870080 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.870158 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.874099 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.875292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.875319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc 
kubenswrapper[4663]: I1212 14:03:40.875327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.876662 4663 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.876682 4663 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.876697 4663 state_mem.go:36] "Initialized new in-memory state store" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.880576 4663 policy_none.go:49] "None policy: Start" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.881082 4663 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.881105 4663 state_mem.go:35] "Initializing new in-memory state store" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.924452 4663 manager.go:334] "Starting Device Plugin manager" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925193 4663 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925214 4663 server.go:79] "Starting device plugin registration server" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925466 4663 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925483 4663 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925709 4663 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925789 4663 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.925801 4663 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 12 14:03:40 crc kubenswrapper[4663]: E1212 14:03:40.930507 4663 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.969282 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.969375 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970421 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970654 4663 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.970698 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971257 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971323 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971354 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971522 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971550 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971880 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971890 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.971980 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972008 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972071 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972108 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972484 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972514 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972591 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972691 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972717 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.972786 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973175 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973191 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973237 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973265 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:40 crc kubenswrapper[4663]: I1212 14:03:40.973801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.026276 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.026918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.026995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.027059 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.027126 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.027480 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.204:6443: connect: connection refused" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.038532 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="400ms" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053731 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053771 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053790 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053805 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053821 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053835 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053849 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053891 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053924 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053946 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053969 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.053983 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.054033 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.054049 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.054061 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.154860 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.154996 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155093 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155198 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155014 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155268 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155044 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155306 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155334 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155283 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155357 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155372 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155388 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155402 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155409 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155425 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 
14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155438 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155414 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155482 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155501 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155518 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155458 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155540 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155439 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155592 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155600 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 
14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155613 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155620 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.155889 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.227984 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.228829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.228866 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.228877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.228895 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.229212 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.204:6443: connect: connection refused" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.303342 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.317015 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.325210 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-090f1228f3af49488034e676634498d89c038421f8c9502604c27d69d9e21e0e WatchSource:0}: Error finding container 090f1228f3af49488034e676634498d89c038421f8c9502604c27d69d9e21e0e: Status 404 returned error can't find the container with id 090f1228f3af49488034e676634498d89c038421f8c9502604c27d69d9e21e0e Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.330663 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-0f8d998da1ad53bc0bcd45d36154c8261e665afb864096f96c13f26393058209 WatchSource:0}: Error finding container 0f8d998da1ad53bc0bcd45d36154c8261e665afb864096f96c13f26393058209: Status 404 returned error can't find the container with id 0f8d998da1ad53bc0bcd45d36154c8261e665afb864096f96c13f26393058209 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.340447 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.344915 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.346578 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-87a0ad39b501359370db6c544f2b2e6f9fa3169b64a435086811401380995b49 WatchSource:0}: Error finding container 87a0ad39b501359370db6c544f2b2e6f9fa3169b64a435086811401380995b49: Status 404 returned error can't find the container with id 87a0ad39b501359370db6c544f2b2e6f9fa3169b64a435086811401380995b49 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.348333 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.352522 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-acc2a7135b44eb64052ee675e3ddf2abb1ebc3f774ab516e703050b5140620dd WatchSource:0}: Error finding container acc2a7135b44eb64052ee675e3ddf2abb1ebc3f774ab516e703050b5140620dd: Status 404 returned error can't find the container with id acc2a7135b44eb64052ee675e3ddf2abb1ebc3f774ab516e703050b5140620dd Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.357363 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-6902e7e37f27d26bb03c10e694ae8711124531d59e03bfd6c360d621169f07e3 WatchSource:0}: Error finding container 6902e7e37f27d26bb03c10e694ae8711124531d59e03bfd6c360d621169f07e3: Status 404 returned error can't find the container with id 6902e7e37f27d26bb03c10e694ae8711124531d59e03bfd6c360d621169f07e3 Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.441740 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="800ms" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.630278 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.631227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.631255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.631264 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.631281 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.631570 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.204:6443: connect: connection refused" node="crc" Dec 12 14:03:41 crc kubenswrapper[4663]: W1212 14:03:41.656672 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:41 crc kubenswrapper[4663]: E1212 14:03:41.656724 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.836328 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 00:30:45.923875621 +0000 UTC Dec 12 14:03:41 crc 
kubenswrapper[4663]: I1212 14:03:41.836384 4663 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 634h27m4.087494336s for next certificate rotation Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.836758 4663 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.873268 4663 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d" exitCode=0 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.873345 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.873426 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"87a0ad39b501359370db6c544f2b2e6f9fa3169b64a435086811401380995b49"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.873499 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.874222 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.874243 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0f8d998da1ad53bc0bcd45d36154c8261e665afb864096f96c13f26393058209"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.874285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.874311 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.874320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875246 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3" exitCode=0 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875287 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875302 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"090f1228f3af49488034e676634498d89c038421f8c9502604c27d69d9e21e0e"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875353 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875837 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.875846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.876877 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.877914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.877959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.877971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878056 4663 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="888b980403f7d859cd62c368edf2d198a44fccef262045515d776ba4d9dc049d" exitCode=0 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878117 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"888b980403f7d859cd62c368edf2d198a44fccef262045515d776ba4d9dc049d"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878152 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6902e7e37f27d26bb03c10e694ae8711124531d59e03bfd6c360d621169f07e3"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878226 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.878839 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.879910 4663 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b" exitCode=0 Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.879994 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.880129 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"acc2a7135b44eb64052ee675e3ddf2abb1ebc3f774ab516e703050b5140620dd"} Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.880205 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.880914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.880937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:41 crc kubenswrapper[4663]: I1212 14:03:41.880946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: W1212 14:03:42.082075 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:42 crc kubenswrapper[4663]: E1212 14:03:42.082146 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError" Dec 12 14:03:42 crc kubenswrapper[4663]: W1212 14:03:42.119411 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:42 crc kubenswrapper[4663]: E1212 14:03:42.119483 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError" Dec 12 14:03:42 crc kubenswrapper[4663]: E1212 14:03:42.242594 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="1.6s" Dec 12 14:03:42 crc kubenswrapper[4663]: W1212 14:03:42.331672 4663 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.204:6443: connect: connection refused Dec 12 14:03:42 crc kubenswrapper[4663]: E1212 14:03:42.331782 4663 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.204:6443: connect: connection refused" logger="UnhandledError" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.432435 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.433404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.433448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.433458 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.433479 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 12 14:03:42 crc kubenswrapper[4663]: E1212 14:03:42.433888 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.204:6443: connect: connection refused" node="crc" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.792629 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.882854 4663 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2dc49046eeae589d9db4806fcf97f292a52644db358d2ffa5a580cd51a6c3b26" exitCode=0 Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.882909 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2dc49046eeae589d9db4806fcf97f292a52644db358d2ffa5a580cd51a6c3b26"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.882994 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.883556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.883576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.883583 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.885552 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"eaba706e397747a3b362494fc815bd9899db690be915307abef9830c977dbf89"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.885635 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.886268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.886291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc 
kubenswrapper[4663]: I1212 14:03:42.886300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.887734 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.887778 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.887789 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.887841 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.888314 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.888333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.888341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889105 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889123 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889133 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889155 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889572 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.889581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 
14:03:42.890895 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.890924 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.890935 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.890947 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.890956 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a"} Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.891017 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.891502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.891531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:42 crc kubenswrapper[4663]: I1212 14:03:42.891540 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894490 4663 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="eb54e8a7ab37aa62beb198ab9a87c98f0f5b1db2294d75f45243c26dd33b312d" exitCode=0 Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894579 4663 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894615 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894657 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894576 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"eb54e8a7ab37aa62beb198ab9a87c98f0f5b1db2294d75f45243c26dd33b312d"} Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.894917 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895310 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895338 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895942 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:43 crc kubenswrapper[4663]: I1212 14:03:43.895951 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.034874 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.035628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.035668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.035681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.035704 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.093202 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.495531 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899277 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4727466eb458e8710f9fb584db0464167e05dcd4cc60a8cf9df18e159c4a1a2"} Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899319 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"471a329a32c10c5a102840d532d422ab42c966454f013d46d47a09c785bb40f5"} Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899330 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"285c7a55f569c855fac2499e4f95315634c184780a5f1428dda49820eecb3420"} Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 
14:03:44.899340 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0b699bc774b74a54140951edb80a69749473643371e71ec5ebe0b188214d8cd8"} Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899349 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6e894a77d64a8ad2dc9c520dff0bb236f090d5c28c0f927a61b100ddafeedb45"} Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899343 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899387 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.899392 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900234 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900263 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.900995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:44 crc kubenswrapper[4663]: I1212 14:03:44.983191 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.187044 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.187121 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.187822 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.187845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.187854 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.563665 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.901148 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.901221 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902473 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:45 crc kubenswrapper[4663]: I1212 14:03:45.902901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.486092 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.486243 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.488379 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.488414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.488427 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.904090 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.904972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.905012 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:46 crc kubenswrapper[4663]: I1212 14:03:46.905020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.163208 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.431240 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:47 crc 
kubenswrapper[4663]: I1212 14:03:47.431427 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.432426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.432460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.432470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.905696 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.906503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.906534 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:47 crc kubenswrapper[4663]: I1212 14:03:47.906543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.475301 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.475421 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.476193 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.476220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.476228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.613949 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.617674 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.909035 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.909703 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.909736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:49 crc kubenswrapper[4663]: I1212 14:03:49.909764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:50 crc kubenswrapper[4663]: I1212 14:03:50.911572 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:50 crc kubenswrapper[4663]: I1212 14:03:50.912579 4663 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:50 crc kubenswrapper[4663]: I1212 14:03:50.912616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:50 crc kubenswrapper[4663]: I1212 14:03:50.912626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:50 crc kubenswrapper[4663]: E1212 14:03:50.930555 4663 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.475563 4663 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.475637 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.620641 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.620688 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.623778 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 12 14:03:52 crc kubenswrapper[4663]: I1212 14:03:52.623814 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 12 14:03:54 crc kubenswrapper[4663]: I1212 14:03:54.496075 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints 
namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 12 14:03:54 crc kubenswrapper[4663]: I1212 14:03:54.496124 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.001483 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.001606 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.002300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.002325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.002333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.009831 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.869793 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.870252 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.920129 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.921034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.921103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:55 crc kubenswrapper[4663]: I1212 14:03:55.921113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:56 crc kubenswrapper[4663]: I1212 14:03:56.489913 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:56 crc kubenswrapper[4663]: I1212 14:03:56.490217 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:56 crc kubenswrapper[4663]: I1212 14:03:56.491084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 12 14:03:56 crc kubenswrapper[4663]: I1212 14:03:56.491188 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:56 crc kubenswrapper[4663]: I1212 14:03:56.491259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.434732 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.435307 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.435579 4663 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.435661 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.436249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.436344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.436404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.438115 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:57 crc kubenswrapper[4663]: E1212 14:03:57.613913 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.615860 4663 trace.go:236] Trace[1362700064]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (12-Dec-2025 14:03:44.833) (total time: 12782ms): Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1362700064]: ---"Objects listed" error: 12782ms (14:03:57.615) Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1362700064]: [12.78214065s] [12.78214065s] END Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.615880 4663 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.616345 4663 trace.go:236] Trace[1528984174]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (12-Dec-2025 14:03:43.783) (total time: 13832ms): Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1528984174]: ---"Objects listed" error: 13832ms (14:03:57.616) Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1528984174]: [13.832515702s] [13.832515702s] END Dec 12 14:03:57 crc kubenswrapper[4663]: 
I1212 14:03:57.616364 4663 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 12 14:03:57 crc kubenswrapper[4663]: E1212 14:03:57.616791 4663 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.616870 4663 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.617385 4663 trace.go:236] Trace[1064920872]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (12-Dec-2025 14:03:45.280) (total time: 12336ms): Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1064920872]: ---"Objects listed" error: 12336ms (14:03:57.617) Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[1064920872]: [12.336423467s] [12.336423467s] END Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.617509 4663 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.617546 4663 trace.go:236] Trace[680237718]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (12-Dec-2025 14:03:43.589) (total time: 14028ms): Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[680237718]: ---"Objects listed" error: 14027ms (14:03:57.616) Dec 12 14:03:57 crc kubenswrapper[4663]: Trace[680237718]: [14.028454438s] [14.028454438s] END Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.617658 4663 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.619460 4663 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.838512 4663 apiserver.go:52] "Watching apiserver" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.840479 4663 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.840671 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.840964 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.841028 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.841048 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.841071 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:03:57 crc kubenswrapper[4663]: E1212 14:03:57.841115 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.841325 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.841392 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:57 crc kubenswrapper[4663]: E1212 14:03:57.841496 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:03:57 crc kubenswrapper[4663]: E1212 14:03:57.841380 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.842843 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.843118 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.843444 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.844310 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.844320 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.844359 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.844710 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.844710 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.845302 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.862879 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.877612 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.895316 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.904213 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.911035 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.917398 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.923869 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.924319 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.925691 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4" exitCode=255 Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.925726 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4"} Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.931342 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.931882 4663 scope.go:117] "RemoveContainer" containerID="115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.932150 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.936716 4663 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.937389 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.944574 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.950923 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.963193 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.971200 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:57 crc kubenswrapper[4663]: I1212 14:03:57.978361 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019453 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019486 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019502 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019517 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019533 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019547 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019570 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 
14:03:58.019585 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019601 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019616 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019629 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019644 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019657 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019671 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019684 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019723 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019779 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") 
pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019808 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019833 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019883 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.019969 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020090 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020152 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020168 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020175 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020232 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020318 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020465 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.020493 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:03:58.52023132 +0000 UTC m=+17.945649374 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020480 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020669 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020493 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020696 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020714 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020718 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020932 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020733 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020962 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020957 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020981 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020943 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020942 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.020999 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021015 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021029 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021043 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021057 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021072 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021088 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021120 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021134 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021138 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: 
"6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021147 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021157 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021193 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021249 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021275 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021286 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021302 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021315 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021327 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021342 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021357 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021371 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021387 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021401 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021413 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod 
\"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021426 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021440 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021453 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021466 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021482 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021498 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021513 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021528 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021545 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021558 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021573 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021588 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021603 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021617 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021631 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021644 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021658 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021671 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021689 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021704 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021718 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021732 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021761 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021775 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021788 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021804 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021817 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021864 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021879 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021904 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021938 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021952 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021965 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021981 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021995 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022008 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022022 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022038 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022051 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022065 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022079 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022093 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022121 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022136 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022151 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022165 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022180 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022194 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022211 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022227 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022241 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022255 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022270 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022284 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022297 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022311 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022326 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022342 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022359 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc 
kubenswrapper[4663]: I1212 14:03:58.022373 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022387 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022402 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022432 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022447 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022461 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022477 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022493 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022507 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022521 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 12 14:03:58 crc 
kubenswrapper[4663]: I1212 14:03:58.022538 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022553 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022568 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022584 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022598 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022612 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022627 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022641 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022655 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022669 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022687 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022703 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022717 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022732 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022764 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022782 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022797 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022811 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022828 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022843 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022858 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022876 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022892 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022906 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022923 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022939 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022957 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022973 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022988 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023003 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023019 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023033 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023049 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023064 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023080 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023094 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023121 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023137 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023152 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023168 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023183 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023202 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023218 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023235 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023249 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023264 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023279 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023295 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023312 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023327 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023347 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023363 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023378 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023395 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023409 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023424 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023442 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023456 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023471 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023486 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023502 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023517 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023533 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023549 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023564 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023580 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023597 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023612 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023627 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 12 14:03:58 crc kubenswrapper[4663]: 
I1212 14:03:58.023642 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023657 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023674 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023689 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023704 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023723 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023755 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023774 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023789 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023804 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023819 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023855 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023891 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023908 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023925 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023943 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023961 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023977 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023998 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024015 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024033 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024049 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024069 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024084 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024111 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024161 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024171 4663 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024182 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024193 4663 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024203 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024211 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024219 4663 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024228 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024237 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024247 4663 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024256 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024265 4663 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024276 4663 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024285 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024294 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024303 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc 
kubenswrapper[4663]: I1212 14:03:58.024312 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024320 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024329 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024338 4663 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028001 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.030321 4663 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032239 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033783 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021338 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021474 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021472 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021591 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021605 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021692 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021800 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021856 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.021863 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022042 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022397 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022411 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022370 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022432 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022455 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034696 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022460 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022687 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022716 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022735 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022767 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.022810 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023110 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023140 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023204 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023256 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023333 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023410 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023524 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023553 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023721 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023737 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023770 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.023826 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024271 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024303 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024330 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024357 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024566 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024452 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.024707 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025016 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025269 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025379 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025540 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025608 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025638 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025799 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025840 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.025991 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026280 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026332 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026398 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026538 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026605 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026850 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026853 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026956 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.026963 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027111 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027225 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027259 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027363 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027427 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027515 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027527 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027548 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027675 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027682 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027767 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027787 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027845 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.027920 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028001 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028447 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028511 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028599 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028608 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028727 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.028855 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029523 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029542 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029633 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029634 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029829 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.029980 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.030029 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.030173 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.030260 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.030959 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031060 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031087 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031505 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031514 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031555 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031680 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.031733 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031811 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.031913 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032017 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.032032 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032050 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032134 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032217 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032234 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032309 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032469 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032394 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032696 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032723 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032843 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.032938 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033126 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033231 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033231 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033309 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.033953 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034195 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034455 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034512 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034840 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.034173 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.035251 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.035337 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:58.5353242 +0000 UTC m=+17.960742254 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.035363 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:58.535348166 +0000 UTC m=+17.960766209 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.035801 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.035875 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.035886 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.036443 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.037125 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.037262 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037334 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037352 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037352 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037364 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037369 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037379 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037412 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:58.537399257 +0000 UTC m=+17.962817311 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.037426 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:58.537420809 +0000 UTC m=+17.962838863 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.037448 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.037655 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038410 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038462 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038516 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038523 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038570 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038839 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.038986 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.039086 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.039288 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.040351 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.040581 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.040808 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.041372 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.041811 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.041985 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042481 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042575 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042641 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042700 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042639 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.042933 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043125 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043422 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043448 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043681 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043721 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.043729 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.044934 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.046894 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.046927 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.047273 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.047501 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.047624 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.048622 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.048772 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.048940 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049370 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049415 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049423 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049393 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049476 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049504 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.049816 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.050970 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.051514 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.051652 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.067279 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.073952 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.075193 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.076294 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125189 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125230 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125272 4663 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125281 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125290 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125299 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125306 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125314 4663 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125322 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125329 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125328 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125337 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125410 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125422 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125435 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125446 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125437 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125456 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125513 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125525 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125537 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125547 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125557 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125566 4663 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125575 4663 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125584 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125593 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125602 4663 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125611 4663 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125621 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125630 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125640 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125649 4663 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125659 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125668 4663 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125677 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125685 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125694 4663 reconciler_common.go:293] "Volume detached for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125703 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125712 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125721 4663 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125730 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125739 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125770 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125780 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125789 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125797 4663 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125807 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125816 4663 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125826 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125836 4663 reconciler_common.go:293] "Volume detached for 
volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125847 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125857 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125866 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125876 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125886 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125894 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125903 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125912 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125921 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125930 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125940 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125948 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125957 4663 reconciler_common.go:293] "Volume detached for volume \"cert\" 
(UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125966 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125976 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125985 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.125994 4663 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126003 4663 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126012 4663 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126022 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126032 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126042 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126051 4663 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126060 4663 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126069 4663 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126078 4663 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126086 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126095 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126114 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126125 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126133 4663 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126142 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126151 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126159 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126168 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126179 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126187 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126195 4663 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126202 4663 reconciler_common.go:293] "Volume detached for volume 
\"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126211 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126218 4663 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126225 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126232 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126241 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126248 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126255 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126266 4663 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126274 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126282 4663 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126290 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126297 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126304 4663 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126311 4663 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126318 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126325 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126332 4663 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126339 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126346 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126353 4663 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126360 4663 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126368 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126375 4663 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126390 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126397 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126405 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: 
\"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126412 4663 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126419 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126426 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126433 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126441 4663 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126448 4663 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126456 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126464 4663 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126470 4663 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126477 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126484 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126491 4663 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126499 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126507 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126515 4663 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126522 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126530 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126537 4663 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126544 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126552 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126560 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126567 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126575 4663 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126583 4663 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126592 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126599 4663 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126607 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126614 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126627 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126634 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126642 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126649 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126657 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126665 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126672 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126680 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126687 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126694 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126701 4663 reconciler_common.go:293] "Volume detached for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126708 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126715 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126722 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126730 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126737 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126759 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126768 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126775 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126783 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126790 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126798 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126805 4663 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126813 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" 
(UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126820 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126828 4663 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126835 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126842 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126849 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126856 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126864 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126870 4663 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126877 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.126884 4663 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.150415 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.157223 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 12 14:03:58 crc kubenswrapper[4663]: W1212 14:03:58.158089 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-f0d4099c5f3518febc576d952539306490f68503bb0d2f39a7361ebf9fd914b7 WatchSource:0}: Error finding container f0d4099c5f3518febc576d952539306490f68503bb0d2f39a7361ebf9fd914b7: Status 404 returned error can't find the container with id f0d4099c5f3518febc576d952539306490f68503bb0d2f39a7361ebf9fd914b7 Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.160966 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.528690 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.528853 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:03:59.528840606 +0000 UTC m=+18.954258661 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.630037 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.630073 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.630103 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.630129 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630178 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630209 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630214 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630234 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630244 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630253 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:59.630238519 +0000 UTC m=+19.055656573 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630272 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:59.630263477 +0000 UTC m=+19.055681531 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630286 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:59.63028021 +0000 UTC m=+19.055698263 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630540 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630558 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630567 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: E1212 14:03:58.630610 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:03:59.630602594 +0000 UTC m=+19.056020648 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.872517 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.872992 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.873638 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.874256 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.874787 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.875218 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 
14:03:58.875701 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.876201 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.876722 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.877187 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.877604 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.878208 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.878646 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.879104 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.879561 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.880027 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.880485 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.880840 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.883686 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.884271 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.884994 4663 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.885477 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.885854 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.886717 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.887078 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.887969 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.888499 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.889244 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.889711 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.890490 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.890896 4663 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.890986 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.892706 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.893145 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.893486 4663 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.894766 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.895602 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.896079 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.896934 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.897607 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.898181 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.898740 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.899442 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.900726 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.901168 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.901634 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.902450 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.903422 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.903850 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.904263 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.905027 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.905463 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.906299 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.906713 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.929339 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.929370 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.929381 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"65195d570199bb2b5fe2c4a481268e19a8406b2e01d83f0a8299d2338596b16a"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.930468 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fb481c79015c91c9d2fea9163d17a8aac3e3115baf62157838a38d6046528f38"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.931490 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.931514 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f0d4099c5f3518febc576d952539306490f68503bb0d2f39a7361ebf9fd914b7"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.936468 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.937647 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a"} Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.938015 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.941096 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.950652 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.959434 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.967685 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.975661 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.985445 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:58 crc kubenswrapper[4663]: I1212 14:03:58.993783 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:58Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.003089 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.025786 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.045032 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.058097 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.067218 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.077390 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.087648 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.480639 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.483735 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.486108 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.490276 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.497386 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.505583 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.515680 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.525556 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.534579 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.536487 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.536560 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:04:01.53654474 +0000 UTC m=+20.961962794 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.543217 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.551303 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.559165 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.567588 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.575844 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.583847 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.591951 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.600429 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.608413 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:03:59Z is after 2025-08-24T17:21:41Z" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.636995 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.637039 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.637061 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.637079 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637132 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637197 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637204 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:01.637190809 +0000 UTC m=+21.062608873 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637209 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637219 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637231 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637244 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:01.637236456 +0000 UTC m=+21.062654510 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637248 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637369 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637385 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637432 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:01.637324244 +0000 UTC m=+21.062742298 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.637448 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:01.637438261 +0000 UTC m=+21.062856316 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.869821 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.869842 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:03:59 crc kubenswrapper[4663]: I1212 14:03:59.869885 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.869927 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.869996 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:03:59 crc kubenswrapper[4663]: E1212 14:03:59.870056 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.816928 4663 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.818200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.818227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.818235 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.818263 4663 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.822675 4663 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.822822 4663 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.823344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.823367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.823380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.823390 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.823399 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.858700 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.860982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.861004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
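The patch above was rejected because the serving certificate of the network-node-identity webhook expired on 2025-08-24T17:21:41Z. A minimal Go sketch to confirm this from the node itself (it assumes the webhook is still listening on 127.0.0.1:9743, the address quoted in the error; InsecureSkipVerify is required precisely because normal verification fails on the expired certificate):

// certcheck.go - read the webhook's serving certificate and its validity window.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := state.PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate is expired, matching the x509 error in the log")
	}
}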
event="NodeHasNoDiskPressure" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.861012 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.861023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.861032 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.870671 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.873124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.873149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
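Both the pod sync errors and the recurring NodeNotReady condition point at the same root message: no CNI configuration file in /etc/kubernetes/cni/net.d/. A quick Go sketch to check that directory on the node (the path is taken verbatim from the errors; everything else is illustrative):

// cnicheck.go - list the CNI config directory the kubelet complains about.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d/"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("readdir:", err)
		return
	}
	if len(entries) == 0 {
		fmt.Println("no CNI configuration present, matching the kubelet error")
	}
	for _, e := range entries {
		fmt.Println(filepath.Join(dir, e.Name()))
	}
}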
event="NodeHasNoDiskPressure" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.873157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.873174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.873183 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.878613 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.880833 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"6
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.882754 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.882778 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.882787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.882795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.882802 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.890770 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.892556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.892579 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.892588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.892597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.892605 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.894368 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.899861 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: E1212 14:04:00.899958 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.900825 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.900847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.900856 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.900865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.900871 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:00Z","lastTransitionTime":"2025-12-12T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.902848 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.911450 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.918962 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.925850 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.934722 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.941035 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80"} Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.951403 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126
bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.960064 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.967950 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.975308 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.982622 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.990056 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:00 crc kubenswrapper[4663]: I1212 14:04:00.997273 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.002612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.002643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.002653 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.002665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.002673 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.006575 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.104200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.104228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.104236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.104249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.104257 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.205928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.205972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.205981 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.205994 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.206004 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.307206 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.307238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.307248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.307259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.307268 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.409266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.409302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.409312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.409324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.409332 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.511076 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.511103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.511111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.511120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.511127 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.548396 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.548536 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:04:05.548522324 +0000 UTC m=+24.973940378 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.612785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.612821 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.612830 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.612841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.612849 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.649059 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.649087 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.649108 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.649122 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649205 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649234 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649211 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649261 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649269 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649242 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649251 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:05.649240231 +0000 UTC m=+25.074658285 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649294 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649302 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:05.649294232 +0000 UTC m=+25.074712287 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649306 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649312 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:05.649307448 +0000 UTC m=+25.074725503 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.649340 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:05.649327486 +0000 UTC m=+25.074745550 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.714646 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.714673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.714683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.714693 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.714701 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.816961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.816993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.817001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.817013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.817023 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.870024 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.870063 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.870111 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.870034 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.870250 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:01 crc kubenswrapper[4663]: E1212 14:04:01.870319 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.918461 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.918489 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.918498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.918511 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:01 crc kubenswrapper[4663]: I1212 14:04:01.918518 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:01Z","lastTransitionTime":"2025-12-12T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.020850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.020889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.020900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.020915 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.020925 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.122656 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.122687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.122697 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.122708 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.122716 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.224516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.224554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.224565 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.224579 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.224588 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.326716 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.326763 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.326772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.326784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.326794 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.428733 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.428783 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.428794 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.428808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.428818 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.530466 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.530497 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.530507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.530519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.530527 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.632340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.632374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.632383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.632395 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.632404 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.734100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.734128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.734138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.734150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.734158 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.835987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.836017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.836027 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.836037 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.836045 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.937927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.937965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.937973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.937986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:02 crc kubenswrapper[4663]: I1212 14:04:02.937994 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:02Z","lastTransitionTime":"2025-12-12T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.039610 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.039639 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.039648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.039659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.039668 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.141487 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.141519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.141526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.141538 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.141546 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.243370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.243403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.243412 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.243426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.243434 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.345081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.345112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.345120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.345133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.345142 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.446969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.447005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.447013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.447027 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.447036 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.532512 4663 csr.go:261] certificate signing request csr-2cjj5 is approved, waiting to be issued Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.533641 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-sqmhz"] Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.533893 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.535439 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.535442 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.535678 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.535721 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.536379 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-dr24h"] Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.536552 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.542276 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.542637 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.542647 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.548872 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.548894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.548903 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.548918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.548926 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.551195 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.553571 4663 csr.go:257] certificate signing request csr-2cjj5 is issued Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.562962 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.575624 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.587991 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.596360 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.609404 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.618306 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.627455 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.631616 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-f2n4r"] Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.631863 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.642190 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.642254 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.642329 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.642422 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.643003 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.649761 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"res
ource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.650871 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.650898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.650907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.650918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.650926 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662131 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662445 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m24pq\" (UniqueName: \"kubernetes.io/projected/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-kube-api-access-m24pq\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662474 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9spgg\" (UniqueName: \"kubernetes.io/projected/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-kube-api-access-9spgg\") pod \"node-resolver-dr24h\" (UID: 
\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662496 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-serviceca\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662511 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-hosts-file\") pod \"node-resolver-dr24h\" (UID: \"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.662527 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-host\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.679900 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" 
for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.689698 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.698495 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.706901 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.715587 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.725096 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.740135 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.749985 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.752494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.752516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.752525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.752537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.752546 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.759872 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763670 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-cnibin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763704 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-multus-certs\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763725 4663 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-host\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763743 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-socket-dir-parent\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763770 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-netns\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763785 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-bin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763797 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-etc-kubernetes\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763817 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763830 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-os-release\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763842 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-kubelet\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763860 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-cni-binary-copy\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763873 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-conf-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763888 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-k8s-cni-cncf-io\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763908 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m24pq\" (UniqueName: \"kubernetes.io/projected/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-kube-api-access-m24pq\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763921 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-multus\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763936 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9spgg\" (UniqueName: \"kubernetes.io/projected/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-kube-api-access-9spgg\") pod \"node-resolver-dr24h\" (UID: \"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763949 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-hostroot\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763964 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gh6f\" (UniqueName: \"kubernetes.io/projected/262620a9-ff94-42e0-a449-6c1a022f9b77-kube-api-access-8gh6f\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.763988 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-hosts-file\") pod \"node-resolver-dr24h\" (UID: \"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764001 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-system-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764015 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-daemon-config\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764035 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-serviceca\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764664 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-host\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764729 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-serviceca\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.764919 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-hosts-file\") pod \"node-resolver-dr24h\" (UID: \"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.769892 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:03Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.778601 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m24pq\" (UniqueName: \"kubernetes.io/projected/aeda098f-80ee-41c3-b54d-fa390fd0e3ec-kube-api-access-m24pq\") pod \"node-ca-sqmhz\" (UID: \"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\") " pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.779194 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9spgg\" (UniqueName: \"kubernetes.io/projected/9bfcf4b6-a84c-4c32-a79e-a17bf8776478-kube-api-access-9spgg\") pod \"node-resolver-dr24h\" (UID: \"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\") " pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.844276 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-sqmhz" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.848144 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-dr24h" Dec 12 14:04:03 crc kubenswrapper[4663]: W1212 14:04:03.854147 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaeda098f_80ee_41c3_b54d_fa390fd0e3ec.slice/crio-2acb733d8d1d3e0c4f3c980a98a5e1edd7de960800973b74b82d47f0d2b8a11e WatchSource:0}: Error finding container 2acb733d8d1d3e0c4f3c980a98a5e1edd7de960800973b74b82d47f0d2b8a11e: Status 404 returned error can't find the container with id 2acb733d8d1d3e0c4f3c980a98a5e1edd7de960800973b74b82d47f0d2b8a11e Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.854487 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.854516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.854526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.854538 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.854546 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: W1212 14:04:03.857147 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bfcf4b6_a84c_4c32_a79e_a17bf8776478.slice/crio-c90a6e1d994bd4a2612349a187961d7655a89c8f54aa7949fca96ff8d66e09fa WatchSource:0}: Error finding container c90a6e1d994bd4a2612349a187961d7655a89c8f54aa7949fca96ff8d66e09fa: Status 404 returned error can't find the container with id c90a6e1d994bd4a2612349a187961d7655a89c8f54aa7949fca96ff8d66e09fa Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864827 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-netns\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864857 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-bin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864872 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-etc-kubernetes\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864895 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-socket-dir-parent\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864909 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-os-release\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864930 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-kubelet\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864950 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864965 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-conf-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc 
kubenswrapper[4663]: I1212 14:04:03.864979 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-cni-binary-copy\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864989 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-netns\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865014 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-kubelet\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865028 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-k8s-cni-cncf-io\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865021 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-bin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865046 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865047 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-conf-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865068 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-os-release\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865075 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-socket-dir-parent\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864993 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-k8s-cni-cncf-io\") pod \"multus-f2n4r\" 
(UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.864988 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-etc-kubernetes\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865133 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-multus\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865151 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-hostroot\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865165 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gh6f\" (UniqueName: \"kubernetes.io/projected/262620a9-ff94-42e0-a449-6c1a022f9b77-kube-api-access-8gh6f\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865180 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-system-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865186 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-var-lib-cni-multus\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865194 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-daemon-config\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865613 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-multus-daemon-config\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865620 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-multus-certs\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865520 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/262620a9-ff94-42e0-a449-6c1a022f9b77-cni-binary-copy\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865223 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-hostroot\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865650 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-cnibin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865663 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-host-run-multus-certs\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865350 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-system-cni-dir\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.865693 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/262620a9-ff94-42e0-a449-6c1a022f9b77-cnibin\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.870531 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:03 crc kubenswrapper[4663]: E1212 14:04:03.870613 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.870665 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:03 crc kubenswrapper[4663]: E1212 14:04:03.870705 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.870740 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:03 crc kubenswrapper[4663]: E1212 14:04:03.870823 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.878924 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gh6f\" (UniqueName: \"kubernetes.io/projected/262620a9-ff94-42e0-a449-6c1a022f9b77-kube-api-access-8gh6f\") pod \"multus-f2n4r\" (UID: \"262620a9-ff94-42e0-a449-6c1a022f9b77\") " pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.941213 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-f2n4r" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.950971 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sqmhz" event={"ID":"aeda098f-80ee-41c3-b54d-fa390fd0e3ec","Type":"ContainerStarted","Data":"2acb733d8d1d3e0c4f3c980a98a5e1edd7de960800973b74b82d47f0d2b8a11e"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.952225 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dr24h" event={"ID":"9bfcf4b6-a84c-4c32-a79e-a17bf8776478","Type":"ContainerStarted","Data":"c90a6e1d994bd4a2612349a187961d7655a89c8f54aa7949fca96ff8d66e09fa"} Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.956357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.956385 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.956394 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.956406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:03 crc kubenswrapper[4663]: I1212 14:04:03.956415 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:03Z","lastTransitionTime":"2025-12-12T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:03 crc kubenswrapper[4663]: W1212 14:04:03.961526 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod262620a9_ff94_42e0_a449_6c1a022f9b77.slice/crio-426d4134aec814508020d2a640cc43fe1b4cf12cc713597716d54dae6e101fe4 WatchSource:0}: Error finding container 426d4134aec814508020d2a640cc43fe1b4cf12cc713597716d54dae6e101fe4: Status 404 returned error can't find the container with id 426d4134aec814508020d2a640cc43fe1b4cf12cc713597716d54dae6e101fe4 Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.014257 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-kgwm2"] Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.014731 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.020469 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-kvqpx"] Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.021265 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.022625 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.022853 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.023031 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.023238 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.023358 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.023408 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.027128 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.034823 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.056694 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.058109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.058140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.058150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.058162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.058170 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.087009 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.098909 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.105593 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.113914 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.121634 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.129384 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.136371 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.148912 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveRead
Only\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.159524 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.160544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.160576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.160585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.160599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.160607 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.167785 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/74a58ec6-61d4-48ec-ad74-739c778f89b5-rootfs\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.167922 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-os-release\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168041 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74a58ec6-61d4-48ec-ad74-739c778f89b5-proxy-tls\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168169 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdxxz\" (UniqueName: \"kubernetes.io/projected/74a58ec6-61d4-48ec-ad74-739c778f89b5-kube-api-access-bdxxz\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168287 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-cnibin\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168394 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168514 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr9dw\" (UniqueName: \"kubernetes.io/projected/e41f4560-4fe0-4b87-bb7b-a3055733abba-kube-api-access-vr9dw\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168629 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-binary-copy\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168737 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74a58ec6-61d4-48ec-ad74-739c778f89b5-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168871 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-system-cni-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.168972 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.170173 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 
14:04:04.178869 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.187211 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.195606 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.205104 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.214889 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.223604 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.231279 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.239965 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.256516 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.262770 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.262900 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.262991 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.263069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.263156 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.268400 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269569 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/74a58ec6-61d4-48ec-ad74-739c778f89b5-rootfs\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269601 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-os-release\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269627 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74a58ec6-61d4-48ec-ad74-739c778f89b5-proxy-tls\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269660 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdxxz\" (UniqueName: \"kubernetes.io/projected/74a58ec6-61d4-48ec-ad74-739c778f89b5-kube-api-access-bdxxz\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269676 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-cnibin\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: 
\"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269692 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-binary-copy\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269697 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/74a58ec6-61d4-48ec-ad74-739c778f89b5-rootfs\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269706 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269787 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr9dw\" (UniqueName: \"kubernetes.io/projected/e41f4560-4fe0-4b87-bb7b-a3055733abba-kube-api-access-vr9dw\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269816 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74a58ec6-61d4-48ec-ad74-739c778f89b5-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269835 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-system-cni-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269848 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.269986 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.270224 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.270308 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-os-release\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.270638 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74a58ec6-61d4-48ec-ad74-739c778f89b5-mcd-auth-proxy-config\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.270684 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-system-cni-dir\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.270688 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e41f4560-4fe0-4b87-bb7b-a3055733abba-cnibin\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.271271 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e41f4560-4fe0-4b87-bb7b-a3055733abba-cni-binary-copy\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.272695 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74a58ec6-61d4-48ec-ad74-739c778f89b5-proxy-tls\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.282759 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.286635 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdxxz\" (UniqueName: \"kubernetes.io/projected/74a58ec6-61d4-48ec-ad74-739c778f89b5-kube-api-access-bdxxz\") pod \"machine-config-daemon-kgwm2\" (UID: \"74a58ec6-61d4-48ec-ad74-739c778f89b5\") " pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.291405 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr9dw\" (UniqueName: \"kubernetes.io/projected/e41f4560-4fe0-4b87-bb7b-a3055733abba-kube-api-access-vr9dw\") pod \"multus-additional-cni-plugins-kvqpx\" (UID: \"e41f4560-4fe0-4b87-bb7b-a3055733abba\") " pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc 
kubenswrapper[4663]: I1212 14:04:04.293957 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.310525 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.324742 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.331738 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" Dec 12 14:04:04 crc kubenswrapper[4663]: W1212 14:04:04.332982 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74a58ec6_61d4_48ec_ad74_739c778f89b5.slice/crio-da423676049a0cecf86c39ed4a1d0d71874cd35e8e139b7393e084320b9d00c2 WatchSource:0}: Error finding container da423676049a0cecf86c39ed4a1d0d71874cd35e8e139b7393e084320b9d00c2: Status 404 returned error can't find the container with id da423676049a0cecf86c39ed4a1d0d71874cd35e8e139b7393e084320b9d00c2 Dec 12 14:04:04 crc kubenswrapper[4663]: W1212 14:04:04.342025 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode41f4560_4fe0_4b87_bb7b_a3055733abba.slice/crio-ddd6178a4c4b5436efd46d44ff7cb32365e696dcd4711fc4bd5380928f574c63 WatchSource:0}: Error finding container ddd6178a4c4b5436efd46d44ff7cb32365e696dcd4711fc4bd5380928f574c63: Status 404 returned error can't find the container with id ddd6178a4c4b5436efd46d44ff7cb32365e696dcd4711fc4bd5380928f574c63 Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.364882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.365139 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.365268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.365396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.365520 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.373814 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rz99s"] Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.374519 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.375728 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.376081 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.376375 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.376550 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.377066 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.377301 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.377560 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.384690 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.391928 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.400599 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.409281 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.417382 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.425040 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.431816 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.439543 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.450206 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.463501 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.467477 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.467506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 
14:04:04.467515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.467527 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.467536 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.472923 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f
95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.483341 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.499347 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.524332 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.554400 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-12 13:59:03 +0000 UTC, rotation deadline is 2026-10-18 06:14:27.747953109 +0000 UTC Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.554473 4663 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7432h10m23.193483136s for next certificate rotation Dec 12 14:04:04 crc 
kubenswrapper[4663]: I1212 14:04:04.569673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.569703 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.569736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.569772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.569781 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572020 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572052 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572071 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572086 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572102 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572118 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572136 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572186 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572260 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572310 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572326 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572356 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572372 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572388 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572403 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572426 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572484 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572530 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6skt\" (UniqueName: \"kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572594 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.572620 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.671406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.671440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.671451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.671464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.671474 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673733 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673780 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673802 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673820 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673834 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673848 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673862 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673879 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673895 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673895 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673907 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673939 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673950 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673962 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673986 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673970 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673988 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674006 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674010 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673933 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.673971 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674031 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674115 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674132 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674147 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674146 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674169 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674177 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674185 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6skt\" (UniqueName: \"kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674182 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674202 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674206 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674223 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674251 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674285 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674324 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674383 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.674612 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.676826 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.686812 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6skt\" (UniqueName: \"kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt\") pod \"ovnkube-node-rz99s\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.773077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.773113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.773122 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.773135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.773143 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
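Each volume above passes through the kubelet's mount reconciliation twice in this capture: a reconciler_common.go "MountVolume started" entry followed by a matching operation_generator.go "MountVolume.SetUp succeeded" entry. A minimal cross-check sketch, assuming Python and log lines read as raw text (the escaped \" quotes are literal bytes in this file); function and variable names are illustrative:

    import re
    from collections import defaultdict

    STARTED = re.compile(r'operationExecutor\.MountVolume started for volume \\"([^\\]+)\\"')
    SUCCEEDED = re.compile(r'MountVolume\.SetUp succeeded for volume \\"([^\\]+)\\"')

    def unpaired_volumes(lines):
        # Collect which phases each volume reached; report any volume that
        # did not log both "started" and "SetUp succeeded".
        phases = defaultdict(set)
        for line in lines:
            if m := STARTED.search(line):
                phases[m.group(1)].add("started")
            if m := SUCCEEDED.search(line):
                phases[m.group(1)].add("succeeded")
        return {vol: p for vol, p in phases.items() if len(p) < 2}

Run over the entries above, every volume of ovnkube-node-rz99s pairs up, so the sketch would return an empty dict.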
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.958657 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.958876 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.958887 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"da423676049a0cecf86c39ed4a1d0d71874cd35e8e139b7393e084320b9d00c2"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.959646 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerStarted","Data":"8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.959686 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerStarted","Data":"426d4134aec814508020d2a640cc43fe1b4cf12cc713597716d54dae6e101fe4"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.960985 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sqmhz" event={"ID":"aeda098f-80ee-41c3-b54d-fa390fd0e3ec","Type":"ContainerStarted","Data":"cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.962712 4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503" exitCode=0
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.962779 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.962814 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerStarted","Data":"ddd6178a4c4b5436efd46d44ff7cb32365e696dcd4711fc4bd5380928f574c63"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.964052 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-dr24h" event={"ID":"9bfcf4b6-a84c-4c32-a79e-a17bf8776478","Type":"ContainerStarted","Data":"17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.966709 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.975304 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.979717 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.979762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.979772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
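The "SyncLoop (PLEG)" entries above report container lifecycle transitions as a small JSON object after event=. A minimal sketch for tallying them per pod, assuming Python; names are illustrative:

    import json
    import re
    from collections import Counter

    EVENT = re.compile(r'pod="([^"]+)" event=(\{.*\})')

    def pleg_counts(lines):
        # Count (pod, event type) pairs such as ContainerStarted/ContainerDied.
        counts = Counter()
        for line in lines:
            if m := EVENT.search(line):
                counts[(m.group(1), json.loads(m.group(2))["Type"])] += 1
        return counts

Over the entries above this would show, for example, three ContainerStarted events for machine-config-daemon-kgwm2 and one ContainerDied for multus-additional-cni-plugins-kvqpx.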
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.979785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.979798 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:04Z","lastTransitionTime":"2025-12-12T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.984359 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.986520 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:04:04 crc kubenswrapper[4663]: W1212 14:04:04.996468 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89a20840_03d2_434f_a5a1_3e71288c3ec5.slice/crio-49d6ebc54ef9d8b27a907b311e2478143db3e78f72bcdcdcbf2c1a7b4da9dcc7 WatchSource:0}: Error finding container 49d6ebc54ef9d8b27a907b311e2478143db3e78f72bcdcdcbf2c1a7b4da9dcc7: Status 404 returned error can't find the container with id 49d6ebc54ef9d8b27a907b311e2478143db3e78f72bcdcdcbf2c1a7b4da9dcc7
Dec 12 14:04:04 crc kubenswrapper[4663]: I1212 14:04:04.996645 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:04Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.005814 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.013843 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.022055 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.029553 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.037541 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.045902 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.054399 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.062422 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.068638 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.082172 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.082298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.082338 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.082354 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.082363 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.100426 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.142869 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedA
t\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.182032 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.184138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.184166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.184174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.184187 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.184196 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.223002 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.261141 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.286695 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.286724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.286734 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.286757 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.286766 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.300858 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.341168 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 
14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.386919 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"na
me\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.388420 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.388447 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.388455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.388467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.388475 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.426871 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.461473 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.490068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.490092 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.490100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.490111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.490120 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.501413 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.541694 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.580152 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.582501 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.582683 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:04:13.582661876 +0000 UTC m=+33.008079930 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.591761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.591795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.591806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.591819 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.591835 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.622112 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.663125 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc 
kubenswrapper[4663]: I1212 14:04:05.683576 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.683613 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.683634 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.683655 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683709 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683737 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683761 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683765 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683782 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683792 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683804 4663 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:13.683791743 +0000 UTC m=+33.109209796 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683823 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683837 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:13.683826248 +0000 UTC m=+33.109244302 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683765 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683865 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:13.683859421 +0000 UTC m=+33.109277475 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.683876 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:13.683871574 +0000 UTC m=+33.109289628 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.693700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.693728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.693736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.693764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.693773 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.795417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.795454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.795464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.795476 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.795485 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.870085 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.870138 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.870102 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.870199 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.870335 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:05 crc kubenswrapper[4663]: E1212 14:04:05.870415 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.896738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.896788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.896803 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.896816 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.896826 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.967959 4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1" exitCode=0 Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.968030 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.970017 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" exitCode=0 Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.970094 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.970135 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"49d6ebc54ef9d8b27a907b311e2478143db3e78f72bcdcdcbf2c1a7b4da9dcc7"} Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.981918 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.991793 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.998591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.998612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.998623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.998635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:05 crc kubenswrapper[4663]: I1212 14:04:05.998643 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:05Z","lastTransitionTime":"2025-12-12T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.000716 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:05Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.009419 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.016182 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.023388 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.035702 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.044863 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.052474 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.060690 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.100371 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.100406 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.100414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.100427 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.100435 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.101250 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.141252 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.181352 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.202641 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.202671 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.202679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.202691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.202700 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.223932 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.260804 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.300643 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.304171 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.304203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.304213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.304226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.304235 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.342715 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.384859 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z 
is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.406539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.406570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.406579 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.406591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.406598 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.422587 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.461201 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.510591 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.510626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.510635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.510648 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.510660 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.519496 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.544917 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.580736 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.612491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.612522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.612530 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.612542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.612551 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.621564 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.661273 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.701674 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.713858 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.713883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.713891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.713903 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.713911 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.741805 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.781809 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.815435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.815492 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.815502 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.815514 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.815523 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.917039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.917064 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.917072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.917083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.917091 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:06Z","lastTransitionTime":"2025-12-12T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.974090    4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7" exitCode=0
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.974143    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976582    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976763    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976774    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976783    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976792    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.976801    4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"}
Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.985433    4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:06 crc kubenswrapper[4663]: I1212 14:04:06.995439 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:06Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.007550 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.014480 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.018946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.018979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.018988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.019001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.019009 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.022188 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.032337 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\
",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.065178 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.100803 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.121168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.121197 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.121206 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.121219 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.121227 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.140601 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.181699 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.221623 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.222829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.222859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.222870 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.222883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.222891 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.259937 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.301540 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.324512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.324551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.324559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.324572 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.324581 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.341703 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.427043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.427077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.427087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 
14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.427098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.427107 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.529001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.529029 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.529039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.529052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.529062 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.630694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.630719 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.630728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.630738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.630759 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.732812 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.732836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.732843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.732854 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.732862 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.835086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.835113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.835121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.835131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.835139 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.869491 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.869513 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.869529 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:07 crc kubenswrapper[4663]: E1212 14:04:07.869568 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:07 crc kubenswrapper[4663]: E1212 14:04:07.869628 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:07 crc kubenswrapper[4663]: E1212 14:04:07.869677 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.936637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.936661 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.936670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.936678 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.936686 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:07Z","lastTransitionTime":"2025-12-12T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.980623 4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6" exitCode=0 Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.980651 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6"} Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.990608 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:07 crc kubenswrapper[4663]: I1212 14:04:07.999725 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:07Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.007905 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.014858 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.022238 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.029677 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.037203 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.038026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.038051 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.038060 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.038072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.038080 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.044586 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.052120 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.061444 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.078647 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.087148 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.094705 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.103079 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.140089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.140117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.140126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.140138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.140146 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.242047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.242073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.242082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.242091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.242100 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.344154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.344195 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.344204 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.344217 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.344227 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.445620 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.445646 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.445655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.445667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.445675 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.548083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.548121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.548130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.548144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.548152 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.650312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.650342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.650351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.650362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.650371 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.752074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.752113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.752124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.752138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.752146 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.855156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.855199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.855209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.855225 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.855238 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.957101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.957133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.957141 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.957153 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.957161 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:08Z","lastTransitionTime":"2025-12-12T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.986213 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.988223 4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469" exitCode=0
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.988257 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469"}
Dec 12 14:04:08 crc kubenswrapper[4663]: I1212 14:04:08.996636 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:08Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.006229 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.015437 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.022837 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.030201 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.038433 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.045760 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.053377 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.058532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.058558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.058569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.058582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.058590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.059830 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.067878 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.077514 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.089227 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\
\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.097344 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.105423 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:09Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.160085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.160115 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.160124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.160136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.160144 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.261765 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.261794 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.261802 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.261814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.261822 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.367693 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.367724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.367732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.367761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.367770 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.470518 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.470560 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.470570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.470582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.470591 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.572188 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.572218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.572229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.572241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.572250 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.674205 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.674235 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.674244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.674256 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.674263 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.776130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.776162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.776170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.776180 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.776189 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.869396 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.869440 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:09 crc kubenswrapper[4663]: E1212 14:04:09.869487 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:09 crc kubenswrapper[4663]: E1212 14:04:09.869544 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.869441 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:09 crc kubenswrapper[4663]: E1212 14:04:09.869638 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.878173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.878200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.878208 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.878218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.878225 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.980096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.980130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.980143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.980155 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.980164 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:09Z","lastTransitionTime":"2025-12-12T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.995492 4663 generic.go:334] "Generic (PLEG): container finished" podID="e41f4560-4fe0-4b87-bb7b-a3055733abba" containerID="b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938" exitCode=0 Dec 12 14:04:09 crc kubenswrapper[4663]: I1212 14:04:09.995526 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerDied","Data":"b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.004651 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.013294 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.020151 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.028061 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.037144 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.049013 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.057695 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.066442 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.074864 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.081711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.081757 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.081767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.081779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.081788 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.083238 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.091732 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.100075 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.106397 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.113464 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.183956 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.184140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.184148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.184160 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.184168 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.285732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.285780 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.285789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.285800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.285809 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.387637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.387667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.387674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.387684 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.387692 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.489045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.489073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.489082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.489093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.489101 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.591437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.591462 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.591469 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.591481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.591489 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.692855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.692919 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.692932 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.692959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.692968 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.768682 4663 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.794274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.794304 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.794313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.794339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.794348 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.878343 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.886618 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.894142 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.896194 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.896236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.896244 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.896258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.896267 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.901438 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.908240 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 
14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.914211 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.922300 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.932481 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac
266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.945800 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.954141 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.961158 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.969764 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.978390 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.990789 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:10Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.997526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.997553 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.997563 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.997575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:10 crc kubenswrapper[4663]: I1212 14:04:10.997585 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:10Z","lastTransitionTime":"2025-12-12T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.000810 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.001537 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.001567 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.001610 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.019027 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.022104 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" event={"ID":"e41f4560-4fe0-4b87-bb7b-a3055733abba","Type":"ContainerStarted","Data":"d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.029677 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 
2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.030389 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.030442 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.037878 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.046126 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.052279 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.059915 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.068688 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.080261 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de30
5a03e4a823b2db5f500d19bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.088210 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.098185 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.099250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.099279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.099288 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.099299 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.099307 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.106477 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.114604 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.120597 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.122862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.122894 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.122904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.122916 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.122924 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.128304 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.131551 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.133911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.133940 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.133949 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.133961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.133970 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.137431 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.141541 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"6
34273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.143899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.144022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.144087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.144143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.144217 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.145860 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.152458 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.153892 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.154353 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.154432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.154496 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: 
I1212 14:04:11.154557 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.154608 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.162167 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.162239 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.164237 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.164285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.164295 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.164305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.164312 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.168587 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc 
kubenswrapper[4663]: E1212 14:04:11.171739 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider 
started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d
34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.171869 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.175374 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.181725 4663 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.190387 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.199424 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.200667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.200693 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.200704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.200715 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.200723 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.211581 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de30
5a03e4a823b2db5f500d19bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.220627 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.228199 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.236238 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.244158 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:11Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.302704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.302729 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.302737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.302767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.302776 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.404722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.404772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.404788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.404801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.404809 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.506984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.507036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.507045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.507060 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.507069 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.608852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.608883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.608891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.608903 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.608912 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.710613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.710650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.710658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.710671 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.710680 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.812987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.813022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.813032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.813044 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.813053 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.868999 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.869026 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.869011 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.869142 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.869098 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:11 crc kubenswrapper[4663]: E1212 14:04:11.869238 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.914804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.914850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.914860 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.914872 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:11 crc kubenswrapper[4663]: I1212 14:04:11.914880 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:11Z","lastTransitionTime":"2025-12-12T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.016637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.016674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.016683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.016696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.016705 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.119053 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.119102 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.119112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.119124 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.119133 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.220395 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.220430 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.220438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.220453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.220460 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.322616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.322646 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.322654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.322666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.322674 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.424922 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.425154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.425162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.425174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.425182 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.526806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.526845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.526855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.526867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.526876 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.628233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.628266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.628276 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.628301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.628309 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.729559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.729598 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.729607 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.729619 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.729629 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.830968 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.831000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.831026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.831042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.831051 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.932533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.932568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.932577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.932589 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:12 crc kubenswrapper[4663]: I1212 14:04:12.932598 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:12Z","lastTransitionTime":"2025-12-12T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.026795 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/0.log" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.028889 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc" exitCode=1 Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.028925 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.029381 4663 scope.go:117] "RemoveContainer" containerID="a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.034343 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.034405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.034414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.034449 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.034459 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.040773 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.056226 4663 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:12Z\\\",\\\"message\\\":\\\":160\\\\nI1212 14:04:12.435766 6010 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.435773 6010 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1212 14:04:12.435781 6010 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1212 14:04:12.435786 6010 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1212 14:04:12.435864 6010 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1212 14:04:12.435868 6010 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.435764 6010 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1212 14:04:12.435980 6010 factory.go:656] Stopping watch factory\\\\nI1212 14:04:12.435989 6010 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1212 14:04:12.436011 6010 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.436137 6010 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.436200 6010 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.064512 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.072867 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.081150 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.089252 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.097237 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.105809 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\
\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.116179 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.124714 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.134158 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.136206 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.136231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.136239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.136250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.136265 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.142958 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.150180 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.157215 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:13Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.238410 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.238434 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.238442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.238453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.238462 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.339784 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.339817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.339826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.339837 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.339847 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.442304 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.442329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.442337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.442348 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.442371 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.543860 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.543889 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.543898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.543908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.543915 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.646013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.646040 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.646048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.646059 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.646067 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.650291 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.650423 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:04:29.650408895 +0000 UTC m=+49.075826949 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.747727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.747771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.747779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.747789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.747796 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.751134 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.751162 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.751181 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.751200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751246 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751266 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751271 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751285 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751293 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:29.751282535 +0000 UTC m=+49.176700579 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751293 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751305 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:29.751299678 +0000 UTC m=+49.176717732 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751319 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:29.751312272 +0000 UTC m=+49.176730326 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751328 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751339 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751347 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.751386 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:04:29.751378587 +0000 UTC m=+49.176796641 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.849317 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.849346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.849354 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.849383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.849391 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.869806 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.869809 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.869895 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.869999 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.870053 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:13 crc kubenswrapper[4663]: E1212 14:04:13.870096 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.951101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.951130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.951139 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.951151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:13 crc kubenswrapper[4663]: I1212 14:04:13.951161 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:13Z","lastTransitionTime":"2025-12-12T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.032101 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/1.log" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.032579 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/0.log" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.034228 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c" exitCode=1 Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.034256 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.034291 4663 scope.go:117] "RemoveContainer" containerID="a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.034685 4663 scope.go:117] "RemoveContainer" containerID="632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c" Dec 12 14:04:14 crc kubenswrapper[4663]: E1212 14:04:14.034820 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.048840 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dd
e250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:12Z\\\",\\\"message\\\":\\\":160\\\\nI1212 14:04:12.435766 6010 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.435773 6010 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1212 14:04:12.435781 6010 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1212 14:04:12.435786 6010 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1212 14:04:12.435864 6010 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1212 14:04:12.435868 6010 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.435764 6010 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1212 14:04:12.435980 6010 factory.go:656] Stopping watch factory\\\\nI1212 14:04:12.435989 6010 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1212 14:04:12.436011 6010 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.436137 6010 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.436200 6010 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network 
policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.1
1\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.053116 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.053142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.053150 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.053162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.053170 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.057592 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.064610 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.073965 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.081565 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.087993 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.096101 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.105402 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\"
:\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0
c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running
\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.112938 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/
crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.121051 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.129048 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.137279 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.143604 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.150763 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.155235 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.155329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.155945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.155966 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.155976 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.257887 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.257917 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.257925 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.257937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.257947 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.359250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.359275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.359284 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.359296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.359303 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.461504 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.461533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.461541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.461552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.461560 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.498999 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.507536 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126b
d791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.516421 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.525001 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.532775 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.539187 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.545198 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.552404 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.558691 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.563703 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.563732 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.563741 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.563767 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.563777 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.567462 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.576112 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.587505 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9bb34c1401b657dd222c866258f5d249405de305a03e4a823b2db5f500d19bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:12Z\\\",\\\"message\\\":\\\":160\\\\nI1212 14:04:12.435766 6010 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.435773 6010 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1212 14:04:12.435781 6010 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1212 14:04:12.435786 6010 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1212 14:04:12.435864 6010 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1212 14:04:12.435868 6010 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.435764 6010 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1212 14:04:12.435980 6010 factory.go:656] Stopping watch factory\\\\nI1212 14:04:12.435989 6010 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1212 14:04:12.436011 6010 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1212 14:04:12.436137 6010 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1212 14:04:12.436200 6010 reflector.go:311] Stopping reflector *v1.Node (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.594906 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.601616 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.608841 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:14Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.665627 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.665650 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.665672 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.665684 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.665692 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.767359 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.767409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.767418 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.767432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.767441 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.869908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.869940 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.869948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.869960 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.869970 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.971626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.971656 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.971666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.971678 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:14 crc kubenswrapper[4663]: I1212 14:04:14.971686 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:14Z","lastTransitionTime":"2025-12-12T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.037962 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/1.log" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.040474 4663 scope.go:117] "RemoveContainer" containerID="632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c" Dec 12 14:04:15 crc kubenswrapper[4663]: E1212 14:04:15.040591 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.048887 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\
"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.057526 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\
\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.068817 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dd
e250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.073251 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.073276 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.073285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.073297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.073306 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.076810 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.084897 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.092364 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.099545 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.105635 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.113171 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.121490 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.129448 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.137140 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.144433 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.151307 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:15Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.174675 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.174701 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.174710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.174722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.174729 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.276800 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.276827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.276835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.276846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.276854 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.379076 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.379110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.379119 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.379132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.379148 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.481021 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.481055 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.481065 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.481079 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.481087 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.583113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.583146 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.583154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.583165 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.583173 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.684935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.684982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.684991 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.685003 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.685012 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.786272 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.786309 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.786318 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.786331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.786342 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.869558 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.869618 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.869559 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:15 crc kubenswrapper[4663]: E1212 14:04:15.869658 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:15 crc kubenswrapper[4663]: E1212 14:04:15.869698 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:15 crc kubenswrapper[4663]: E1212 14:04:15.869778 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.888564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.888589 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.888598 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.888608 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.888618 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.989911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.989947 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.989956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.989970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:15 crc kubenswrapper[4663]: I1212 14:04:15.989986 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:15Z","lastTransitionTime":"2025-12-12T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.026440 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m"] Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.026764 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.027897 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.028794 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.037367 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"n
ame\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.050025 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.058017 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.065466 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.067301 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z766m\" 
(UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.067345 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9bsk\" (UniqueName: \"kubernetes.io/projected/09e108fb-f53f-46db-b289-c45cb40811f5-kube-api-access-s9bsk\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.067371 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09e108fb-f53f-46db-b289-c45cb40811f5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.067408 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.078834 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.091452 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.091485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.091493 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.091506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.091515 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.095873 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.106575 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.115221 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.123619 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.132672 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.140873 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.148705 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.155032 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.162041 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.168343 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.168375 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.168417 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9bsk\" (UniqueName: \"kubernetes.io/projected/09e108fb-f53f-46db-b289-c45cb40811f5-kube-api-access-s9bsk\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.168441 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09e108fb-f53f-46db-b289-c45cb40811f5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.168909 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.169072 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09e108fb-f53f-46db-b289-c45cb40811f5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.169297 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:16Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.172102 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09e108fb-f53f-46db-b289-c45cb40811f5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.179665 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9bsk\" (UniqueName: \"kubernetes.io/projected/09e108fb-f53f-46db-b289-c45cb40811f5-kube-api-access-s9bsk\") pod \"ovnkube-control-plane-749d76644c-z766m\" (UID: \"09e108fb-f53f-46db-b289-c45cb40811f5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.193673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.193702 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.193711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.193722 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.193730 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.295805 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.295843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.295854 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.295869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.295880 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.337220 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m"
Dec 12 14:04:16 crc kubenswrapper[4663]: W1212 14:04:16.347244 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09e108fb_f53f_46db_b289_c45cb40811f5.slice/crio-27567c0dc893f3877520bcbd104340840213bc5af3b59b64b6323d46af8d00e8 WatchSource:0}: Error finding container 27567c0dc893f3877520bcbd104340840213bc5af3b59b64b6323d46af8d00e8: Status 404 returned error can't find the container with id 27567c0dc893f3877520bcbd104340840213bc5af3b59b64b6323d46af8d00e8
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.397924 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.397956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.397964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.397977 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.397985 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.499877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.499907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.499916 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.499928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.499935 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.601997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.602023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.602031 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.602042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.602050 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.704039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.704067 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.704075 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.704087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.704095 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.805661 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.805687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.805695 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.805707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.805715 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.907242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.907451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.907521 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.907580 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:16 crc kubenswrapper[4663]: I1212 14:04:16.907630 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:16Z","lastTransitionTime":"2025-12-12T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.009697 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.009810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.009829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.009843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.009852 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.044770 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" event={"ID":"09e108fb-f53f-46db-b289-c45cb40811f5","Type":"ContainerStarted","Data":"4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337"}
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.044805 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" event={"ID":"09e108fb-f53f-46db-b289-c45cb40811f5","Type":"ContainerStarted","Data":"c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f"}
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.044818 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" event={"ID":"09e108fb-f53f-46db-b289-c45cb40811f5","Type":"ContainerStarted","Data":"27567c0dc893f3877520bcbd104340840213bc5af3b59b64b6323d46af8d00e8"}
Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.053852 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.061325 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.069611 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.073858 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-9t6qf"] Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.074208 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.074257 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.079647 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\
\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\
\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8c
cf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.095390 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dd
e250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.103462 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.111091 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.112197 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.112222 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.112230 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.112243 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.112251 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.118800 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.126817 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.135964 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.144138 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.152659 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.159951 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.169500 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.175605 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r5mq\" (UniqueName: \"kubernetes.io/projected/91ce7def-b363-4fe9-8d21-b161e64d0f1c-kube-api-access-9r5mq\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.175647 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.176668 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.185114 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir
\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.192634 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.201552 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.210734 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.213761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.213793 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.213801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.213815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.213823 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.218714 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.225949 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 
14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.232125 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.238734 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.245197 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.253394 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.262330 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.273861 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.275962 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r5mq\" (UniqueName: \"kubernetes.io/projected/91ce7def-b363-4fe9-8d21-b161e64d0f1c-kube-api-access-9r5mq\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.276001 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.276083 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.276127 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:17.776116799 +0000 UTC m=+37.201534852 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.281476 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.289355 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r5mq\" (UniqueName: \"kubernetes.io/projected/91ce7def-b363-4fe9-8d21-b161e64d0f1c-kube-api-access-9r5mq\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.290377 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.297839 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.305541 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:17Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.315904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.315938 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.315947 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.315962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.315970 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.417931 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.417964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.417974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.417986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.417996 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.520051 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.520087 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.520095 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.520107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.520116 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.621697 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.621730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.621738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.621771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.621782 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.723828 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.723859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.723867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.723879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.723886 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.780512 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.780668 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.780830 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:18.78081547 +0000 UTC m=+38.206233525 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.825507 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.825556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.825582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.825594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.825602 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.869209 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.869220 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.869300 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.869322 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.869418 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:17 crc kubenswrapper[4663]: E1212 14:04:17.869489 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.927623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.927652 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.927662 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.927691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:17 crc kubenswrapper[4663]: I1212 14:04:17.927700 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:17Z","lastTransitionTime":"2025-12-12T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.029607 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.029635 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.029645 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.029657 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.029665 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.131068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.131106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.131117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.131132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.131150 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.233467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.233498 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.233506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.233517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.233525 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.335910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.335935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.335942 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.335954 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.335962 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.438067 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.438101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.438109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.438121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.438129 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.539401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.539453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.539463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.539474 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.539482 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.641547 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.641576 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.641584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.641593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.641600 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.743522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.743550 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.743558 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.743568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.743575 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.787683 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:18 crc kubenswrapper[4663]: E1212 14:04:18.787795 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:18 crc kubenswrapper[4663]: E1212 14:04:18.787836 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:20.78782377 +0000 UTC m=+40.213241824 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.845464    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.845492    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.845500    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.845510    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.845518    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.869261    4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:18 crc kubenswrapper[4663]: E1212 14:04:18.869357    4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.947512    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.947538    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.947546    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.947555    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:18 crc kubenswrapper[4663]: I1212 14:04:18.947562    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:18Z","lastTransitionTime":"2025-12-12T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.048664    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.048695    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.048704    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.048717    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.048725    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.150361    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.150393    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.150402    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.150413    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.150422    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.252037    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.252101    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.252110    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.252121    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.252129    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.353984    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.354152    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.354160    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.354169    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.354177    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.456381    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.456409    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.456417    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.456438    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.456446    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.558424    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.558469    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.558477    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.558491    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.558511    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.660291    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.660325    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.660333    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.660344    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.660351    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.762045    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.762077    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.762085    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.762094    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.762102    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.863574    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.863609    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.863637    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.863654    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.863670    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.869825    4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.869863    4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.869883    4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:19 crc kubenswrapper[4663]: E1212 14:04:19.869907    4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:04:19 crc kubenswrapper[4663]: E1212 14:04:19.869965    4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:19 crc kubenswrapper[4663]: E1212 14:04:19.870028    4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.965968    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.966001    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.966011    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.966023    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:19 crc kubenswrapper[4663]: I1212 14:04:19.966033    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:19Z","lastTransitionTime":"2025-12-12T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.067696    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.067885    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.067977    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.068044    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.068125    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.169359    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.169383    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.169390    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.169399    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.169407    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.271179    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.271296    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.271358    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.271427    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.271498    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.372983    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.373014    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.373022    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.373035    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.373044    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.474782    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.474818    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.474828    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.474842    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.474852    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.576561    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.576590    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.576599    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.576609    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.576616    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.678724    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.678902    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.678962    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.679029    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.679082    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.781369    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.781547    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.781633    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.781698    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.781775    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.806071    4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:20 crc kubenswrapper[4663]: E1212 14:04:20.806275    4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:20 crc kubenswrapper[4663]: E1212 14:04:20.806397    4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:24.806383017 +0000 UTC m=+44.231801071 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.869102    4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:20 crc kubenswrapper[4663]: E1212 14:04:20.869510    4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.880651    4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.882853    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.882883    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.882894    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.882907    4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.882916    4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.889256    4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/
ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.898241 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-a
piserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.904357 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.911514 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.918801 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.925825 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.933244 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.941195 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.947954 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.956343 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.965907 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\"
:\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0
c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running
\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.978024 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy 
controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath
\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.985016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.985051 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.985061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.985074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.985082 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:20Z","lastTransitionTime":"2025-12-12T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.987347 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:20 crc kubenswrapper[4663]: I1212 14:04:20.994614 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:20Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.002332 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.086526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.086559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.086568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.086581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.086589 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.188241 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.188275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.188283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.188297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.188307 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.208194 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.208231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.208242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.208255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.208264 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.217163 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.219443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.219489 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.219500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.219512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.219522 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.227853 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.230024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.230053 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.230063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.230075 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.230083 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.238154 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.241824 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.241855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.241865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.241879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.241892 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.251069 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.253472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.253499 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.253508 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.253519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.253527 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.261353 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:21Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.261463 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.290119 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.290154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.290166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.290181 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.290191 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.392168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.392226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.392236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.392247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.392255 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.494282 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.494305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.494313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.494332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.494340 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.596239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.596270 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.596279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.596291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.596302 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.698340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.698374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.698383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.698393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.698400 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.800004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.800039 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.800050 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.800063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.800072 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.869844 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.869885 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.869860 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.869949 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.870056 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:21 crc kubenswrapper[4663]: E1212 14:04:21.870111 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.902291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.902327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.902339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.902353 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:21 crc kubenswrapper[4663]: I1212 14:04:21.902365 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:21Z","lastTransitionTime":"2025-12-12T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
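The NodeNotReady condition above is driven by the container runtime reporting NetworkReady=false because no CNI configuration file is present in /etc/kubernetes/cni/net.d/. A minimal Go sketch of what that directory check amounts to follows; it is an illustration of the condition described in the log message, not the kubelet's or CRI-O's actual code, and the directory path is taken from the log itself.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// cniConfDir is the path named in the log messages; the real location
// is configurable in the runtime.
const cniConfDir = "/etc/kubernetes/cni/net.d"

// networkReady reports whether at least one CNI config file exists.
// Standard CNI config loaders accept .conf, .conflist and .json files.
func networkReady() (bool, error) {
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := networkReady()
	if err != nil || !ok {
		// Mirrors the condition recorded by setters.go above.
		fmt.Println("NetworkReady=false reason:NetworkPluginNotReady")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the network provider (OVN-Kubernetes on this cluster) writes its config into that directory, the runtime flips NetworkReady to true and the Ready condition clears.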
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.004189 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.004218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.004226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.004238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.004246 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:22Z","lastTransitionTime":"2025-12-12T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
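The condition= payload in the setters.go entries is plain JSON. A small self-contained Go sketch that decodes one of these payloads follows; the field names come straight from the log, and the local struct is a stand-in for the corev1.NodeCondition API type rather than the real one.

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the JSON payload logged by setters.go above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied from one of the "Node became not ready" entries.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:22Z","lastTransitionTime":"2025-12-12T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}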
Dec 12 14:04:22 crc kubenswrapper[4663]: I1212 14:04:22.869345 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:22 crc kubenswrapper[4663]: E1212 14:04:22.869726 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
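The pod_workers.go "Error syncing pod, skipping" lines show pods that need a new sandbox being deferred, not failed permanently: the worker aborts the sync while the runtime network is not ready and the pod is retried on a later sync, which is why the same pods reappear every couple of seconds. A hypothetical sketch of that gating loop, with all names and timings illustrative, is:

package main

import (
	"errors"
	"fmt"
	"time"
)

var errNetworkNotReady = errors.New("network is not ready: container runtime network not ready")

// syncPod stands in for one pod worker iteration: a pod that needs a new
// sandbox cannot start until the runtime reports NetworkReady=true.
func syncPod(pod string, networkReady bool) error {
	if !networkReady {
		return errNetworkNotReady
	}
	fmt.Printf("started sandbox for %s\n", pod)
	return nil
}

func main() {
	pods := []string{"openshift-multus/network-metrics-daemon-9t6qf"}
	// Periodic resync, as in the log where the same pods are retried
	// until the CNI config appears.
	for tick := 0; tick < 3; tick++ {
		for _, p := range pods {
			if err := syncPod(p, false); err != nil {
				fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p)
			}
		}
		time.Sleep(10 * time.Millisecond)
	}
}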
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.125904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.125946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.125969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.125984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.125993 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:23Z","lastTransitionTime":"2025-12-12T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.869220 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.869240 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:23 crc kubenswrapper[4663]: I1212 14:04:23.869263 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:23 crc kubenswrapper[4663]: E1212 14:04:23.869332 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:23 crc kubenswrapper[4663]: E1212 14:04:23.869401 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:04:23 crc kubenswrapper[4663]: E1212 14:04:23.869475 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.146048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.146078 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.146088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.146099 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.146108 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:24Z","lastTransitionTime":"2025-12-12T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.839642 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:24 crc kubenswrapper[4663]: E1212 14:04:24.839784 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:24 crc kubenswrapper[4663]: E1212 14:04:24.839864 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:32.839844614 +0000 UTC m=+52.265262668 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:24 crc kubenswrapper[4663]: I1212 14:04:24.869900 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:24 crc kubenswrapper[4663]: E1212 14:04:24.869997 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
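The nestedpendingoperations.go entry schedules the next MountVolume attempt 8 seconds out (durationBeforeRetry 8s), which is the signature of exponential backoff between failed volume operations: the delay roughly doubles after each failure up to a cap. A sketch of that schedule follows; the initial delay, factor and cap are illustrative assumptions, not the kubelet's exact constants.

package main

import (
	"fmt"
	"time"
)

// backoff doubles the retry delay after each failure, up to a cap.
type backoff struct {
	delay, max time.Duration
}

func (b *backoff) next() time.Duration {
	d := b.delay
	b.delay *= 2
	if b.delay > b.max {
		b.delay = b.max
	}
	return d
}

func main() {
	b := &backoff{delay: 500 * time.Millisecond, max: 2 * time.Minute}
	now := time.Date(2025, 12, 12, 14, 4, 24, 0, time.UTC)
	for i := 0; i < 6; i++ {
		d := b.next()
		now = now.Add(d)
		// After a few failures the delay reaches the 8s seen in the log.
		fmt.Printf("retry %d: durationBeforeRetry %v, next attempt %s\n", i+1, d, now.Format(time.RFC3339))
	}
}

The underlying failure, object "openshift-multus"/"metrics-daemon-secret" not registered, clears on its own once the kubelet's secret manager has registered the pod's secrets for watching; the retry at 14:04:32 then succeeds or backs off again.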
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.061636 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.061664 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.061672 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.061682 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.061690 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:25Z","lastTransitionTime":"2025-12-12T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.869736 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.869815 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:25 crc kubenswrapper[4663]: E1212 14:04:25.869898 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.869938 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:25 crc kubenswrapper[4663]: E1212 14:04:25.869983 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:25 crc kubenswrapper[4663]: E1212 14:04:25.870103 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
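The same "No sandbox for pod can be found" entries recur for the same pods roughly every two seconds. When skimming a log like this one, it helps to count occurrences per pod rather than read them all; a small self-contained Go sketch that extracts the pod="..." field and tallies these entries follows (the sample lines are copied from the log above).

package main

import (
	"bufio"
	"fmt"
	"regexp"
	"strings"
)

// podRE extracts the pod="..." field used throughout this log.
var podRE = regexp.MustCompile(`pod="([^"]+)"`)

func main() {
	sample := strings.Join([]string{
		`Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.869736 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"`,
		`Dec 12 14:04:25 crc kubenswrapper[4663]: I1212 14:04:25.869815 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"`,
	}, "\n")

	counts := map[string]int{}
	sc := bufio.NewScanner(strings.NewReader(sample))
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "No sandbox for pod can be found") {
			continue
		}
		if m := podRE.FindStringSubmatch(line); m != nil {
			counts[m[1]]++
		}
	}
	for pod, n := range counts {
		fmt.Printf("%d\t%s\n", n, pod)
	}
}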
Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.078403 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.078436 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.078444 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.078455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.078464 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.180367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.180402 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.180410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.180422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.180431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.282724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.282766 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.282774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.282785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.282792 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.384566 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.384621 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.384630 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.384644 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.384655 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.486287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.486331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.486340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.486351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.486360 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.587928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.587969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.587978 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.587989 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.587996 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.690132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.690173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.690182 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.690198 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.690207 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.792210 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.792240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.792248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.792259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.792267 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.869540 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:26 crc kubenswrapper[4663]: E1212 14:04:26.869638 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.894090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.894127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.894136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.894148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.894157 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.996056 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.996098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.996107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.996121 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:26 crc kubenswrapper[4663]: I1212 14:04:26.996129 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:26Z","lastTransitionTime":"2025-12-12T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.098057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.098111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.098119 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.098131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.098142 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.199429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.199459 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.199467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.199494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.199502 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.301609 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.301642 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.301651 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.301695 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.301705 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.404090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.404128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.404137 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.404149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.404161 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.505694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.505728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.505736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.505764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.505774 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.607460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.607491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.607500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.607512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.607520 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.709279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.709311 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.709320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.709333 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.709341 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.811636 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.811729 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.811737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.811766 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.811775 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.869669 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.869699 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.869681 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:27 crc kubenswrapper[4663]: E1212 14:04:27.869784 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:27 crc kubenswrapper[4663]: E1212 14:04:27.869842 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:27 crc kubenswrapper[4663]: E1212 14:04:27.869897 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.913716 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.913762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.913772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.913782 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:27 crc kubenswrapper[4663]: I1212 14:04:27.913790 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:27Z","lastTransitionTime":"2025-12-12T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.015206 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.015242 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.015252 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.015264 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.015272 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.117254 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.117308 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.117317 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.117329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.117338 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.219388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.219417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.219425 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.219437 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.219445 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.321096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.321128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.321139 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.321156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.321164 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.423220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.423249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.423258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.423268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.423277 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.525255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.525279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.525287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.525299 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.525308 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.627142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.627177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.627187 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.627200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.627208 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.728973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.729004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.729014 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.729027 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.729036 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.830817 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.830848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.830856 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.830867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.830875 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.869622 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:28 crc kubenswrapper[4663]: E1212 14:04:28.869725 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.932932 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.932968 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.932976 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.932986 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:28 crc kubenswrapper[4663]: I1212 14:04:28.932995 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:28Z","lastTransitionTime":"2025-12-12T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.034725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.034772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.034781 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.034793 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.034801 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.136814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.136844 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.136853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.136864 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.136871 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.238386 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.238421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.238433 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.238445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.238476 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.340081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.340134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.340143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.340158 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.340167 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.441764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.441808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.441833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.441853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.441861 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.543865 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.543891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.543899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.543910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.543917 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.646324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.646360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.646377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.646389 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.646397 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.679839 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.679998 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:05:01.67997955 +0000 UTC m=+81.105397604 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.748316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.748346 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.748355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.748367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.748375 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.780951 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.780991 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.781019 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.781035 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781099 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781116 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781124 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781136 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781151 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781131 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:05:01.781121389 +0000 UTC m=+81.206539442 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781196 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:05:01.78118481 +0000 UTC m=+81.206602864 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781208 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:05:01.781201811 +0000 UTC m=+81.206619875 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781681 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781779 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781838 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.781941 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:05:01.781925374 +0000 UTC m=+81.207343428 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.849441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.849474 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.849485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.849513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.849523 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.869049 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.869099 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.869158 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.869167 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.869385 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:29 crc kubenswrapper[4663]: E1212 14:04:29.869441 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.869666 4663 scope.go:117] "RemoveContainer" containerID="632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.951299 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.951329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.951337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.951366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:29 crc kubenswrapper[4663]: I1212 14:04:29.951374 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:29Z","lastTransitionTime":"2025-12-12T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.053311 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.053336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.053345 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.053377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.053385 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.074769 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/1.log" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.076853 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.077190 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.088016 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 
14:04:30.097002 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.108206 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-releas
e\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{
\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.121218 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"
/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f4
2993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.133323 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.141356 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.149740 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.155874 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.155922 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.155933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.155945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.155954 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.158542 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.166663 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.173652 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.182046 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.190613 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.198477 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.205985 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 
14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.212197 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.219045 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.257170 4663 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.257197 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.257204 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.257216 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.257225 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.361108 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.361164 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.361177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.361193 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.361203 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.462911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.462943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.462952 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.462966 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.462974 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.565231 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.565264 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.565273 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.565285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.565294 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.668998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.669026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.669035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.669047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.669056 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.770982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.771016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.771025 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.771037 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.771045 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.869122 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:30 crc kubenswrapper[4663]: E1212 14:04:30.869235 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.872875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.872911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.872920 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.872931 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.872940 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.879283 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.886954 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.895980 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.904709 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.912073 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.918966 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.926263 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.934697 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.945514 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 
14:04:30.957019 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.964440 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.971252 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.974539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.974569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.974597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.974611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.974619 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:30Z","lastTransitionTime":"2025-12-12T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.980076 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.988437 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:30 crc kubenswrapper[4663]: I1212 14:04:30.995265 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:30Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.003957 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.076213 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.076266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.076276 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.076289 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.076299 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.080102 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/2.log" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.080645 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/1.log" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.082525 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" exitCode=1 Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.082553 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.082590 4663 scope.go:117] "RemoveContainer" containerID="632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.082979 4663 scope.go:117] "RemoveContainer" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.083104 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.093847 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.103418 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.112244 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.119704 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.126991 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.133698 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.141547 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.147896 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.156348 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.168810 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.178052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.178084 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.178093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.178105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.178114 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.182162 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7
b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://632eacd8b57a91706d2308c79b79e40a0e3ed4dde250afa8b6aa07b0d0b3894c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:13Z\\\",\\\"message\\\":\\\"k:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605695 6128 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:13.605722 6128 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1212 14:04:13.605610 6128 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.190907 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.198450 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.206040 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.213547 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.220050 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.279787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.279814 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.279823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.279864 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.279874 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.381628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.381658 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.381668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.381710 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.381718 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.484031 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.484057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.484066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.484077 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.484085 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.531706 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.531737 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.531762 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.531773 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.531780 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.540668 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.543228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.543253 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.543287 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.543299 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.543307 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.551006 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.553294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.553328 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.553336 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.553349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.553357 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.561262 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.563444 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.563475 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.563485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.563497 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.563505 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.571783 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.573841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.573862 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.573870 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.573879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.573887 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.581399 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:31Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.581517 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.585349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
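
Annotation: every patch attempt above is rejected by the node.network-node-identity.openshift.io admission webhook because its serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-12; the kubelet retries the status update a small fixed number of times (five in the kubelet sources I'm aware of) and then gives up with "update node status exceeds retry count", which is exactly the sequence recorded here. The error text comes from Go's crypto/x509 verification, so the staleness is easy to confirm offline. A minimal sketch, assuming the webhook's certificate has been extracted to a PEM file whose path is supplied by the caller:

    package main

    // Editor's sketch: report a certificate's validity window against the
    // current clock, mirroring the "x509: certificate has expired or is not
    // yet valid" failure in the log above.

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        if len(os.Args) != 2 {
            log.Fatal("usage: certcheck <cert.pem>")
        }
        data, err := os.ReadFile(os.Args[1])
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block in input")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now()
        fmt.Printf("NotBefore: %s\nNotAfter:  %s\nNow:       %s\n",
            cert.NotBefore.Format(time.RFC3339),
            cert.NotAfter.Format(time.RFC3339),
            now.Format(time.RFC3339))
        switch {
        case now.After(cert.NotAfter):
            fmt.Println("expired: matches the webhook failure in this log")
        case now.Before(cert.NotBefore):
            fmt.Println("not yet valid")
        default:
            fmt.Println("within validity window")
        }
    }

Run against the webhook's serving certificate, this should print a NotAfter of 2025-08-24T17:21:41Z, the deadline quoted in every failure above; until that certificate is rotated, any node or pod status patch the webhook intercepts will keep failing.
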
event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.585398 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.585408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.585422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.585431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.687225 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.687481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.687564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.687655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.687710 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.789873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.789902 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.789911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.789923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.789932 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.869021 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.869046 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.869024 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.869160 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.869219 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:31 crc kubenswrapper[4663]: E1212 14:04:31.869298 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.891734 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.891779 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.891787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.891801 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.891809 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.994283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.994329 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.994339 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.994353 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:31 crc kubenswrapper[4663]: I1212 14:04:31.994362 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:31Z","lastTransitionTime":"2025-12-12T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.086635 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/2.log" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.089219 4663 scope.go:117] "RemoveContainer" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" Dec 12 14:04:32 crc kubenswrapper[4663]: E1212 14:04:32.089343 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.097392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.097492 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.098554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.098577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.098591 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.099944 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.107044 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.115028 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.124356 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.135856 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: 
Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.143464 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.150429 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.157696 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.165621 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.174089 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.182446 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.190299 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.198171 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.200390 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.200416 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.200425 4663 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.200440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.200448 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.205792 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.212889 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12
T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.219740 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:32Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.301841 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.301888 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.301897 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.301908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.301918 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.403081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.403117 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.403126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.403139 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.403147 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[The five-entry node-status sequence above (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats with fresh timestamps at 14:04:32.403, .504, .607, .708 and .810.]
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.869235 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:32 crc kubenswrapper[4663]: E1212 14:04:32.869326 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.906986 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:32 crc kubenswrapper[4663]: E1212 14:04:32.907117 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 12 14:04:32 crc kubenswrapper[4663]: E1212 14:04:32.907166 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:04:48.907153681 +0000 UTC m=+68.332571736 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered
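The nestedpendingoperations entry above states the retry policy outright: no retries for 16s, i.e. until 14:04:48.907 (the failure time plus the delay). A 16s durationBeforeRetry is consistent with a doubling backoff; the sketch below is a hedged illustration of such a scheme, with an assumed initial delay and cap rather than kubelet's actual constants:

// backoff.go — hedged sketch of a doubling retry delay. The initial delay
// and the cap are assumed values for illustration, not kubelet constants.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond // assumed first retry delay
	maxDelay     = 2 * time.Minute        // assumed cap on the delay
)

// nextDelay doubles the previous delay, saturating at maxDelay.
func nextDelay(prev time.Duration) time.Duration {
	if prev == 0 {
		return initialDelay
	}
	if next := prev * 2; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 8; attempt++ {
		d = nextDelay(d)
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, d)
	}
	// Under these assumptions the sixth consecutive failure yields 16s,
	// matching the durationBeforeRetry printed in the entry above.
}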
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.912278 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.912312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.912320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.912334 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:32 crc kubenswrapper[4663]: I1212 14:04:32.912349 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The same five-entry node-status sequence repeats at 14:04:33.014, .115, .216 and .318.]
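Each "Node became not ready" entry embeds the Ready condition as inline JSON, which makes these repeated blocks easy to post-process. A short Go sketch that unmarshals one such payload (the struct is an illustrative stand-in, not the canonical k8s.io/api type):

// condition.go — parse the condition={...} payload printed by setters.go.
// Field names mirror the JSON visible in the log.
package main

import (
	"encoding/json"
	"fmt"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied from a "Node became not ready" entry above (message shortened).
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:32Z","lastTransitionTime":"2025-12-12T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("condition %s=%s reason=%s: %s\n", c.Type, c.Status, c.Reason, c.Message)
}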
[The five-entry node-status sequence repeats at 14:04:33.420, .522, .624, .727 and .829.]
Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.869602 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.869649 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.869654 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:33 crc kubenswrapper[4663]: E1212 14:04:33.869706 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:04:33 crc kubenswrapper[4663]: E1212 14:04:33.869794 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:33 crc kubenswrapper[4663]: E1212 14:04:33.869855 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
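Every one of these errors traces back to the same precondition: no file in /etc/kubernetes/cni/net.d/. The sketch below is a hedged illustration of that check; the directory comes from the log, while treating .conf/.conflist/.json as config files is a common CNI convention assumed here, not verified cri-o behavior:

// cnicheck.go — hedged sketch of the readiness test implied by the log: the
// runtime reports NetworkReady=false while the CNI conf directory holds no
// network configuration.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log entries
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("NetworkReady=false:", err)
		os.Exit(1)
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // conventional CNI config suffixes
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
	os.Exit(1)
}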
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:33 crc kubenswrapper[4663]: E1212 14:04:33.869794 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:33 crc kubenswrapper[4663]: E1212 14:04:33.869855 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.934043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.934073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.934082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.934094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:33 crc kubenswrapper[4663]: I1212 14:04:33.934103 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:33Z","lastTransitionTime":"2025-12-12T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.036170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.036194 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.036204 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.036214 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.036237 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.137668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.137692 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.137700 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.137711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.137735 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.239868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.239893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.239901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.239914 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.239922 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.341040 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.341073 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.341083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.341096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.341104 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.442551 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.442578 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.442586 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.442597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.442606 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.544569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.544606 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.544616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.544647 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.544657 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.646370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.646401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.646409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.646422 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.646431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.748401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.748430 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.748438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.748451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.748460 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.850236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.850271 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.850280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.850291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.850299 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.869923 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:34 crc kubenswrapper[4663]: E1212 14:04:34.870010 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.951322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.951347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.951355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.951366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:34 crc kubenswrapper[4663]: I1212 14:04:34.951374 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:34Z","lastTransitionTime":"2025-12-12T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.052873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.052904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.052912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.052941 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.052949 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.155140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.155173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.155182 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.155195 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.155203 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.190175 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.197444 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.199343 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.207731 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.215454 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.223097 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.229610 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.237114 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\
\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.246450 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.257357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.257384 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.257392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.257405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.257414 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.258562 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.266535 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.273334 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.281885 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.290101 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.297862 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.304232 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.310959 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.317853 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:35Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.359691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.359720 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.359730 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.359758 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.359769 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.461788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.461815 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.461823 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.461833 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.461842 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.563320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.563359 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.563370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.563384 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.563394 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.665132 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.665168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.665176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.665187 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.665195 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.766384 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.766407 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.766416 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.766426 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.766433 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.868601 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.868628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.868637 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.868665 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.868673 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.869061 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.869108 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:35 crc kubenswrapper[4663]: E1212 14:04:35.869132 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:35 crc kubenswrapper[4663]: E1212 14:04:35.869183 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.869231 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:35 crc kubenswrapper[4663]: E1212 14:04:35.869290 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.970321 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.970435 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.970519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.970592 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:35 crc kubenswrapper[4663]: I1212 14:04:35.970667 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:35Z","lastTransitionTime":"2025-12-12T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.072186 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.072225 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.072236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.072249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.072259 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.173962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.173985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.173993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.174004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.174012 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.275709 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.275736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.275759 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.275771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.275779 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.377777 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.377799 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.377807 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.377818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.377825 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.479741 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.480094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.480154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.480227 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.480287 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.581679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.581707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.581715 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.581726 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.581734 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.683226 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.683331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.683424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.683617 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.683793 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.785190 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.785220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.785229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.785240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.785248 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.869316 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:36 crc kubenswrapper[4663]: E1212 14:04:36.869426 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.887335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.887362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.887371 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.887381 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.887389 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.989620 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.989649 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.989676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.989690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:36 crc kubenswrapper[4663]: I1212 14:04:36.989699 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:36Z","lastTransitionTime":"2025-12-12T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.091350 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.091382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.091391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.091401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.091408 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.192691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.192724 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.192734 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.192761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.192769 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.294807 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.294842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.294852 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.294868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.294879 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.396593 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.396643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.396654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.396676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.396685 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.498647 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.498699 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.498708 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.498721 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.498730 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.600097 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.600148 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.600157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.600168 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.600180 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.701879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.701913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.701920 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.701933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.701942 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.803220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.803240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.803248 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.803259 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.803266 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.868992 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.869027 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.869004 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:37 crc kubenswrapper[4663]: E1212 14:04:37.869077 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:37 crc kubenswrapper[4663]: E1212 14:04:37.869152 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:37 crc kubenswrapper[4663]: E1212 14:04:37.869204 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.904788 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.904810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.904818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.904831 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:37 crc kubenswrapper[4663]: I1212 14:04:37.904840 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:37Z","lastTransitionTime":"2025-12-12T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.006647 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.006687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.006696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.006706 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.006715 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.107706 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.107736 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.107761 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.107774 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.107783 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.208993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.209026 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.209035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.209046 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.209054 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.311252 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.311292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.311300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.311313 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.311322 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.413524 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.413559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.413570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.413582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.413590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.515448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.515483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.515491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.515502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.515511 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.617472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.617503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.617511 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.617522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.617532 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.719594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.719625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.719633 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.719645 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.719654 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.821134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.821166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.821174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.821187 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.821197 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.869788 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:38 crc kubenswrapper[4663]: E1212 14:04:38.869939 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.922853 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.922898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.922923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.922937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:38 crc kubenswrapper[4663]: I1212 14:04:38.922947 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:38Z","lastTransitionTime":"2025-12-12T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.024813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.024848 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.024885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.024898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.024906 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.128049 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.128083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.128094 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.128107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.128115 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.229979 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.230043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.230052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.230064 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.230072 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.331605 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.331822 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.331910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.331976 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.332042 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.433681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.433719 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.433727 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.433738 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.433771 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.535661 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.536032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.536114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.536184 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.536242 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.639021 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.639069 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.639081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.639107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.639118 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.740826 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.740974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.741071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.741238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.741378 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.843337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.843459 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.843531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.843595 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.843657 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.870045 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:39 crc kubenswrapper[4663]: E1212 14:04:39.870157 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.870252 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:39 crc kubenswrapper[4663]: E1212 14:04:39.870402 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.870305 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:39 crc kubenswrapper[4663]: E1212 14:04:39.870624 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.946050 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.946109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.946129 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.946153 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:39 crc kubenswrapper[4663]: I1212 14:04:39.946166 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:39Z","lastTransitionTime":"2025-12-12T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.048326 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.048361 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.048370 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.048382 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.048391 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.150557 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.150592 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.150601 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.150614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.150623 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.253093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.253131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.253140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.253154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.253163 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.354935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.354973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.354982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.354997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.355008 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.457088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.457127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.457136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.457151 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.457160 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.559028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.559063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.559072 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.559086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.559093 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.660910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.660962 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.660973 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.660993 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.661007 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.763263 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.763297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.763322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.763335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.763343 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.865454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.865491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.865500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.865513 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.865522 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.869717 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:40 crc kubenswrapper[4663]: E1212 14:04:40.869856 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.878941 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.888808 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.900367 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.909690 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.918374 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.927912 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.938894 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.946401 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.954740 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.962308 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.967343 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.967367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.967377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.967411 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.967421 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:40Z","lastTransitionTime":"2025-12-12T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.971352 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:40 crc kubenswrapper[4663]: I1212 14:04:40.984815 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:40Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.003897 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.020333 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.037923 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: 
Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.046492 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.054455 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.069274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.069306 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.069315 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.069327 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.069336 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.171057 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.171096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.171105 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.171118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.171127 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.273439 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.273463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.273471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.273482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.273491 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.375533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.375567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.375577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.375588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.375596 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.477280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.477304 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.477312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.477323 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.477331 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.579387 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.579423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.579431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.579441 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.579448 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.649543 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.649575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.649583 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.649597 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.649605 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.658684 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.661465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.661494 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.661503 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.661516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.661526 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.670376 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.672946 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.672972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.672987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.672998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.673005 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.681171 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.683328 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.683349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.683357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.683366 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.683373 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.690974 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.693367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.693392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.693399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.693408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.693432 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.702256 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:41Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.702355 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.703290 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.703316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.703325 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.703335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.703342 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.805638 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.805670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.805679 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.805691 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.805700 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.869219 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.869274 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.869369 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.869438 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.869612 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:41 crc kubenswrapper[4663]: E1212 14:04:41.869710 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.908492 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.908542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.908552 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.908570 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:41 crc kubenswrapper[4663]: I1212 14:04:41.908581 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:41Z","lastTransitionTime":"2025-12-12T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.011387 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.011421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.011434 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.011448 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.011457 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.113965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.113994 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.114004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.114013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.114039 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.215469 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.215508 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.215518 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.215531 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.215540 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.318352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.318400 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.318410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.318425 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.318434 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.421674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.421770 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.421783 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.421804 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.421816 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.524399 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.524455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.524466 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.524488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.524507 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.626356 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.626393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.626405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.626421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.626436 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.728789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.728818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.728827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.728838 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.728845 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.831153 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.831202 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.831214 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.831233 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.831243 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.869882 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:42 crc kubenswrapper[4663]: E1212 14:04:42.870025 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.933585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.933646 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.933659 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.933681 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:42 crc kubenswrapper[4663]: I1212 14:04:42.933695 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:42Z","lastTransitionTime":"2025-12-12T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.035948 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.035996 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.036005 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.036023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.036036 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.138351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.138391 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.138401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.138419 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.138430 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.241065 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.241104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.241114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.241130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.241139 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.342600 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.342633 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.342643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.342655 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.342667 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.444810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.444843 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.444854 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.444867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.444874 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.547628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.547670 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.547680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.547696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.547705 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.649955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.649988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.649999 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.650013 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.650023 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.751877 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.751912 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.751921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.751933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.751942 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.854575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.854605 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.854614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.854664 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.854673 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.869138 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.869138 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.869212 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:43 crc kubenswrapper[4663]: E1212 14:04:43.869300 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:43 crc kubenswrapper[4663]: E1212 14:04:43.869427 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:43 crc kubenswrapper[4663]: E1212 14:04:43.869509 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.956555 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.956589 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.956599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.956616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:43 crc kubenswrapper[4663]: I1212 14:04:43.956625 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:43Z","lastTransitionTime":"2025-12-12T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.059017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.059053 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.059066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.059079 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.059090 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.161300 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.161338 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.161347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.161363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.162025 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.264428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.264469 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.264478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.264491 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.264500 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.366483 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.366693 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.366813 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.366881 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.366939 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.469279 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.469316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.469324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.469335 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.469344 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.570812 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.570940 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.571060 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.571123 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.571189 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.674128 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.674162 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.674170 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.674201 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.674210 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.776836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.776886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.776895 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.776910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.776919 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.869859 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:04:44 crc kubenswrapper[4663]: E1212 14:04:44.869998 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.878983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.879020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.879028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.879043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.879052 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.981351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.981383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.981396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.981410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:44 crc kubenswrapper[4663]: I1212 14:04:44.981419 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:44Z","lastTransitionTime":"2025-12-12T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.084528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.084563 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.084574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.084587 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.084595 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.186402 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.186443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.186453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.186467 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.186477 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.288471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.288511 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.288520 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.288535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.288543 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.390387 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.390442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.390454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.390466 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.390475 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.492471 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.492521 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.492530 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.492541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.492551 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.594429 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.594462 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.594472 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.594485 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.594495 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.696835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.696875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.696884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.696898 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.696909 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.798354 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.798388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.798396 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.798409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.798417 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.869271 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.869294 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.869312 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:45 crc kubenswrapper[4663]: E1212 14:04:45.869378 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:45 crc kubenswrapper[4663]: E1212 14:04:45.869459 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:04:45 crc kubenswrapper[4663]: E1212 14:04:45.869569 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.900104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.900135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.900146 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.900161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:45 crc kubenswrapper[4663]: I1212 14:04:45.900171 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:45Z","lastTransitionTime":"2025-12-12T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.002161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.002180 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.002190 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.002202 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.002210 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.104020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.104058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.104068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.104081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.104091 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.206574 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.206643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.206654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.206680 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.206705 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.309541 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.309585 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.309598 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.309618 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.309629 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.411895 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.411945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.411955 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.411974 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.411989 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.514236 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.514275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.514285 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.514301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.514310 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.616927 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.616972 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.616988 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.617003 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.617013 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.719234 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.719273 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.719283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.719296 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.719306 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.821915 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.821950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.821959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.821970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.821980 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.869778 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:46 crc kubenswrapper[4663]: E1212 14:04:46.869894 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.870398 4663 scope.go:117] "RemoveContainer" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" Dec 12 14:04:46 crc kubenswrapper[4663]: E1212 14:04:46.870605 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.924211 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.924251 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.924260 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.924275 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:46 crc kubenswrapper[4663]: I1212 14:04:46.924285 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:46Z","lastTransitionTime":"2025-12-12T14:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.026307 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.026341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.026355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.026380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.026390 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.128209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.128283 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.128297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.128320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.128336 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.230888 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.230970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.230984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.231004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.231016 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.333956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.334014 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.334028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.334060 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.334077 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.436274 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.436315 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.436324 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.436341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.436356 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.538229 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.538294 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.538305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.538319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.538328 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.640199 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.640244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.640255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.640280 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.640292 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.741967 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.742001 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.742010 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.742024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.742033 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.843463 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.843506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.843517 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.843535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.843545 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.869116 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.869164 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.869209 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:47 crc kubenswrapper[4663]: E1212 14:04:47.869240 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:47 crc kubenswrapper[4663]: E1212 14:04:47.869301 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:47 crc kubenswrapper[4663]: E1212 14:04:47.869356 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.945987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.946028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.946038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.946054 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:47 crc kubenswrapper[4663]: I1212 14:04:47.946064 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:47Z","lastTransitionTime":"2025-12-12T14:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.048536 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.048581 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.048592 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.048611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.048625 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.150891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.150930 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.150943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.150961 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.150971 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.252943 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.253176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.253255 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.253320 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.253380 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.355416 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.355554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.355632 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.355705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.355783 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.457544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.457594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.457605 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.457625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.457635 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.559625 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.559677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.559687 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.559705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.559716 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.661490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.661532 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.661542 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.661559 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.661568 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.763842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.763882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.763893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.763907 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.763915 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.866034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.866083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.866096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.866115 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.866128 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.869299 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:48 crc kubenswrapper[4663]: E1212 14:04:48.869400 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.955082 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:48 crc kubenswrapper[4663]: E1212 14:04:48.955188 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:48 crc kubenswrapper[4663]: E1212 14:04:48.955242 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:05:20.955226933 +0000 UTC m=+100.380644988 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.968203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.968240 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.968250 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.968262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:48 crc kubenswrapper[4663]: I1212 14:04:48.968272 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:48Z","lastTransitionTime":"2025-12-12T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.069975 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.070007 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.070017 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.070028 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.070037 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.172000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.172032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.172043 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.172056 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.172064 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.274177 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.274224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.274234 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.274247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.274256 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.376302 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.376442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.376527 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.376588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.376660 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.478408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.478443 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.478451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.478465 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.478476 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.579904 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.579935 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.579944 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.579959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.579968 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.682266 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.682312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.682322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.682337 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.682348 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.784099 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.784125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.784133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.784144 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.784152 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.869437 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:49 crc kubenswrapper[4663]: E1212 14:04:49.869543 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.869439 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.869450 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:49 crc kubenswrapper[4663]: E1212 14:04:49.869656 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:49 crc kubenswrapper[4663]: E1212 14:04:49.869849 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.885626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.885654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.885663 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.885673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.885682 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.987224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.987253 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.987262 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.987273 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:49 crc kubenswrapper[4663]: I1212 14:04:49.987279 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:49Z","lastTransitionTime":"2025-12-12T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.089222 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.089249 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.089258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.089268 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.089275 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.135397 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/0.log" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.135435 4663 generic.go:334] "Generic (PLEG): container finished" podID="262620a9-ff94-42e0-a449-6c1a022f9b77" containerID="8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb" exitCode=1 Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.135458 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerDied","Data":"8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.135736 4663 scope.go:117] "RemoveContainer" containerID="8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.148292 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.157707 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.166787 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.175130 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.183202 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.190829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.190863 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.190873 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.190885 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.190893 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.192388 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.205155 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.219508 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.229952 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.239867 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.249882 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.260177 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.270374 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.278333 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.285058 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.292318 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.293139 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.293171 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.293180 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.293193 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.293220 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.299812 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 
14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.396451 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.396482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.396493 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.396512 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.396526 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.498548 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.498579 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.498588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.498619 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.498629 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.600845 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.600908 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.600921 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.600938 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.600952 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.703728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.703835 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.703850 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.703874 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.703886 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.806101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.806140 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.806149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.806163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.806176 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.869639 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:50 crc kubenswrapper[4663]: E1212 14:04:50.870010 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.883318 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.891048 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.902076 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.908571 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.908606 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.908614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.908629 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.908639 4663 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:50Z","lastTransitionTime":"2025-12-12T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.911890 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.921863 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.932839 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.946307 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.954975 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.962341 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.970928 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.978380 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.986853 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:50 crc kubenswrapper[4663]: I1212 14:04:50.996413 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:50Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.008256 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.010332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.010357 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.010367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.010379 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.010388 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.016994 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.024165 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.036167 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.111569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.111603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.111611 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.111627 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.111635 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.141999 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/0.log" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.142050 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerStarted","Data":"f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09"} Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.155109 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.167465 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.177067 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.185877 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.193714 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.201218 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.208419 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.213673 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.213702 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.213711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.213725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.213734 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.221988 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.231624 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.240121 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.248666 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.259108 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.267030 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.276488 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.288600 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.297884 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.305925 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:51Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.315433 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.315460 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.315470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.315484 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.315496 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.417660 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.417696 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.417708 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.417725 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.417736 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.519545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.519575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.519584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.519599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.519608 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.620769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.620810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.620819 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.620834 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.620842 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.723438 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.723468 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.723478 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.723490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.723501 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.826066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.826112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.826125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.826142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.826154 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.869434 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.869480 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.869429 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:04:51 crc kubenswrapper[4663]: E1212 14:04:51.869559 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:04:51 crc kubenswrapper[4663]: E1212 14:04:51.869640 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:04:51 crc kubenswrapper[4663]: E1212 14:04:51.869713 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.928089 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.928161 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.928176 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.928203 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:51 crc kubenswrapper[4663]: I1212 14:04:51.928217 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:51Z","lastTransitionTime":"2025-12-12T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.030529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.030577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.030588 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.030598 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.030607 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.100572 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.100626 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.100640 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.100662 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.100689 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.111300 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:52Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.114859 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.114918 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.114931 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.114950 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.114961 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.123712 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
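Every status patch in this stretch of the log is rejected by the same TLS handshake: the network-node-identity webhook at 127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node clock (2025-12-12), so Go's standard x509 validity-window check fails. A minimal offline sketch of that check, assuming a hypothetical path to the webhook's PEM-encoded certificate:

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        // Hypothetical location; the real serving cert path depends on
        // how the network-node-identity webhook is deployed.
        data, err := os.ReadFile("/etc/webhook-certs/tls.crt")
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block in file")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now()
        fmt.Println("NotBefore:", cert.NotBefore.Format(time.RFC3339))
        fmt.Println("NotAfter: ", cert.NotAfter.Format(time.RFC3339))
        // The same window test that yields "x509: certificate has expired
        // or is not yet valid" during the handshakes logged above.
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            fmt.Println("certificate outside validity window at", now.Format(time.RFC3339))
        }
    }

Until that certificate is rotated, every kubelet write that passes through this webhook (both the /pod and /node endpoints seen here) will keep failing the same way.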
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:52Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.126397 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.126433 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
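The patch payloads embedded in these entries are hard to read because the JSON is quoted twice: once when the kubelet formats the patch into the error string and once when klog renders the err value, so every quote appears as \" or \\\". Once the outer err="..." quoting is removed, the inner patch is an ordinary quoted string that strconv.Unquote can restore. A sketch against a shortened stand-in payload (the real patches are the multi-kilobyte blobs above):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "log"
        "strconv"
    )

    func main() {
        // Shortened stand-in for the escaped patch inside
        // err="failed to patch status ..."; one level of quoting
        // (klog's) is assumed to be already stripped.
        escaped := `"{\"metadata\":{\"uid\":\"d9eb143d-ff34-4c12-9136-1b28281e4a94\"},\"status\":{\"phase\":\"Running\"}}"`
        unquoted, err := strconv.Unquote(escaped)
        if err != nil {
            log.Fatal(err)
        }
        var pretty bytes.Buffer
        if err := json.Indent(&pretty, []byte(unquoted), "", "  "); err != nil {
            log.Fatal(err)
        }
        fmt.Println(pretty.String())
    }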
event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.126446 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.126459 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.126473 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.135491 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
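Interleaved with the webhook failures is a separate condition: the runtime reports NetworkReady=false because no CNI configuration exists yet under /etc/kubernetes/cni/net.d/, which keeps the node NotReady and blocks sandbox creation for the three pods listed earlier. A quick sketch of that readiness test, assuming the usual libcni convention that a *.conf, *.conflist, or *.json file in the directory is what is being waited for:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the kubelet message above.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Printf("cannot read %s: %v\n", dir, err)
            return
        }
        found := false
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("CNI config present:", filepath.Join(dir, e.Name()))
                found = true
            }
        }
        if !found {
            fmt.Println("no CNI configuration file found; NetworkReady stays false")
        }
    }

The multus init containers that copy the CNI plugin binaries completed at 14:04:05-14:04:09 above, so this condition is normally transient and clears once the network provider writes its config.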
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:52Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.138567 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.138603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.138615 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.138627 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.138635 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.148478 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:52Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.153564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.153602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
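The repeated x509 failure above is the root cause of the status-update errors: the webhook endpoint at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24, while the node clock reads 2025-12-12. A minimal Go sketch, assuming it is run on the node itself (the address is taken from the failing Post URL in the log), that dials the endpoint and prints the peer certificate's validity window to confirm the expiry:

// certcheck.go - dial a TLS endpoint and print the peer certificate's
// validity window, to confirm an "x509: certificate has expired" failure
// like the one logged above. The address is taken from the log line.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // from the failing webhook Post URL above

	// InsecureSkipVerify lets us inspect a certificate that would fail
	// verification; this probe makes no trust decision.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial %s: %v", addr, err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

Against the state shown in the log, this would print expired=true for the serving certificate, matching the "current time ... is after ..." message in the records above.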
event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.153613 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.153628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.153640 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.164512 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:04:52Z is after 2025-08-24T17:21:41Z" Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.164635 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.165926 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
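The sequence above — several "Error updating node status, will retry" records capped by "update node status exceeds retry count" — is the kubelet's bounded retry around node-status sync: each sync interval it attempts the status PATCH a fixed number of times (nodeStatusUpdateRetry, 5 in the upstream kubelet) and then gives up until the next interval. A minimal sketch of that pattern; patchNodeStatus is a hypothetical stand-in, not the kubelet's real API-server client code:

// retry.go - sketch of the bounded-retry pattern behind the
// "will retry" / "exceeds retry count" pair in the log above.
package main

import (
	"errors"
	"fmt"
)

// Matches the upstream kubelet default (nodeStatusUpdateRetry = 5).
const nodeStatusUpdateRetry = 5

// Hypothetical stand-in: the real call PATCHes the Node object; here it
// always fails the way the log shows (admission webhook cert expired).
func patchNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil // a single success ends the attempt
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}

The bound keeps a persistently failing PATCH (here, an expired webhook certificate that no retry can fix) from blocking the sync loop forever; the whole cycle simply repeats on the next sync, which is why the same pair of errors recurs throughout this log.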
event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.165954 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.165983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.165995 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.166005 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.268048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.268103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.268113 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.268131 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.268143 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.370728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.370796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.370808 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.370822 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.370833 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.472669 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.472704 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.472718 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.472731 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.472757 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.574711 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.574772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.574787 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.574802 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.574812 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.677244 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.677416 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.677480 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.677569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.677642 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.780136 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.780190 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.780200 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.780215 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.780225 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.869141 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:52 crc kubenswrapper[4663]: E1212 14:04:52.869243 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.881984 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.882038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.882048 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.882062 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.882071 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
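Every NotReady condition and every "Error syncing pod" in this stretch carries the same root cause: no CNI configuration file in /etc/kubernetes/cni/net.d/, so the runtime reports NetworkReady=false and no pod sandbox can be created. A small Go sketch reproducing the check the message implies — listing that directory (path taken from the log) for the .conf/.conflist/.json config files that libcni loads:

// cnicheck.go - look for CNI network configs in the directory named by
// the kubelet message above; while none exist, the runtime keeps
// reporting NetworkReady=false.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path from the log message

	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni accepts
			fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file in", dir,
			"- network plugin not ready")
	}
}

In this cluster the configuration is normally written by the network operator (OVN-Kubernetes on OpenShift), which itself cannot come up while the node-identity webhook certificate above is expired — hence the condition persists for the rest of this excerpt.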
Has your network provider started?"} Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.984091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.984122 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.984130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.984143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:52 crc kubenswrapper[4663]: I1212 14:04:52.984152 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:52Z","lastTransitionTime":"2025-12-12T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.085923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.085956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.085964 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.085978 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.085988 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.187953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.187982 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.187990 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.188003 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.188012 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.290122 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.290163 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.290174 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.290189 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.290198 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.392172 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.392209 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.392218 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.392232 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.392241 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.493847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.493874 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.493884 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.493896 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.493904 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.595769 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.595805 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.595816 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.595830 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.595840 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.697657 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.697683 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.697690 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.697702 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.697711 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.799488 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.799526 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.799535 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.799549 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.799558 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.869625 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.869703 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:53 crc kubenswrapper[4663]: E1212 14:04:53.869831 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.869863 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:53 crc kubenswrapper[4663]: E1212 14:04:53.869929 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:53 crc kubenswrapper[4663]: E1212 14:04:53.869985 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.901288 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.901318 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.901330 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.901342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:53 crc kubenswrapper[4663]: I1212 14:04:53.901352 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:53Z","lastTransitionTime":"2025-12-12T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.002992 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.003015 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.003024 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.003034 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.003041 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.104305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.104341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.104355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.104367 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.104376 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.206258 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.206284 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.206292 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.206303 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.206311 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.307506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.307537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.307545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.307554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.307561 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.409481 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.409520 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.409529 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.409544 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.409553 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.510830 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.510868 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.510878 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.510893 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.510904 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.612610 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.612644 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.612654 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.612667 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.612676 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.714052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.714098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.714109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.714119 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.714128 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.815452 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.815482 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.815490 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.815502 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.815510 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.869085 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:54 crc kubenswrapper[4663]: E1212 14:04:54.869201 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.917383 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.917417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.917425 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.917436 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:54 crc kubenswrapper[4663]: I1212 14:04:54.917445 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:54Z","lastTransitionTime":"2025-12-12T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.019666 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.019697 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.019705 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.019729 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.019738 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.121342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.121369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.121377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.121392 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.121403 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.222771 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.222806 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.222816 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.222829 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.222837 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.325047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.325096 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.325106 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.325120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.325130 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.427100 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.427126 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.427134 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.427145 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.427172 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.529142 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.529173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.529182 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.529193 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.529200 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.631110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.631157 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.631166 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.631178 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.631186 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.732879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.732913 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.732923 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.732937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.732948 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.834879 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.834933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.834944 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.834956 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.834965 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.869298 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.869301 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.869312 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:55 crc kubenswrapper[4663]: E1212 14:04:55.869385 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:55 crc kubenswrapper[4663]: E1212 14:04:55.869534 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:55 crc kubenswrapper[4663]: E1212 14:04:55.869442 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.936035 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.936061 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.936070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.936079 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:55 crc kubenswrapper[4663]: I1212 14:04:55.936086 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:55Z","lastTransitionTime":"2025-12-12T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.037562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.037594 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.037602 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.037614 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.037624 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.139074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.139109 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.139120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.139133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.139142 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.241149 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.241183 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.241191 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.241202 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.241210 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.343271 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.343297 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.343305 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.343316 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.343326 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.444414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.444436 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.444444 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.444454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.444461 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.545846 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.545883 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.545891 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.545905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.545915 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.647515 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.647586 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.647600 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.647616 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.647627 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.749408 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.749442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.749453 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.749464 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.749472 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.851212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.851239 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.851247 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.851257 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.851264 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.869587 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:56 crc kubenswrapper[4663]: E1212 14:04:56.869708 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.952952 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.952978 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.952987 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.952997 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:56 crc kubenswrapper[4663]: I1212 14:04:56.953004 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:56Z","lastTransitionTime":"2025-12-12T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.054899 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.054925 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.054934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.054945 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.054952 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.158000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.158042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.158052 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.158066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.158078 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.259623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.259651 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.259676 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.259707 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.259714 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.361783 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.361810 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.361844 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.361855 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.361864 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.463328 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.463355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.463363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.463376 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.463385 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.564959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.565006 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.565016 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.565032 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.565042 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.667068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.667103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.667112 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.667127 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.667136 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.768830 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.768860 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.768869 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.768880 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.768888 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.869903 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:57 crc kubenswrapper[4663]: E1212 14:04:57.870003 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.870024 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.870093 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:57 crc kubenswrapper[4663]: E1212 14:04:57.870169 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:57 crc kubenswrapper[4663]: E1212 14:04:57.870220 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.870994 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.871022 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.871030 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.871042 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.871052 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.972764 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.972789 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.972797 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.972807 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:57 crc kubenswrapper[4663]: I1212 14:04:57.972815 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:57Z","lastTransitionTime":"2025-12-12T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.074623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.074842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.074911 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.074985 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.075047 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.177083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.177104 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.177129 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.177141 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.177148 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.278402 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.278423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.278431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.278442 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.278449 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.380058 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.380088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.380098 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.380110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.380119 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.481592 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.481615 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.481623 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.481634 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.481658 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.583728 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.583785 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.583795 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.583809 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.583818 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.685470 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.685519 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.685528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.685539 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.685546 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.786965 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.786998 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.787007 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.787020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.787029 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.869636 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:04:58 crc kubenswrapper[4663]: E1212 14:04:58.869779 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.888138 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.888165 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.888173 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.888183 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.888191 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.990036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.990063 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.990070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.990080 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:58 crc kubenswrapper[4663]: I1212 14:04:58.990089 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:58Z","lastTransitionTime":"2025-12-12T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.092082 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.092110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.092118 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.092130 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.092138 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.193374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.193400 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.193417 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.193428 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.193435 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.294600 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.294643 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.294668 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.294677 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.294683 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.396321 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.396352 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.396360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.396374 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.396383 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.498377 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.498405 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.498414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.498424 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.498431 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.600486 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.600509 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.600516 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.600525 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.600532 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.702071 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.702101 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.702110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.702120 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.702128 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.803983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.804023 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.804033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.804047 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.804056 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.869837 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.869874 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.869890 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:04:59 crc kubenswrapper[4663]: E1212 14:04:59.869919 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:04:59 crc kubenswrapper[4663]: E1212 14:04:59.869975 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:04:59 crc kubenswrapper[4663]: E1212 14:04:59.870054 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.906267 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.906293 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.906301 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.906312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:04:59 crc kubenswrapper[4663]: I1212 14:04:59.906321 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:04:59Z","lastTransitionTime":"2025-12-12T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.008036 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.008065 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.008074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.008085 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.008092 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.110033 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.110068 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.110078 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.110091 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.110099 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.211905 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.211934 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.211942 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.211953 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.211963 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.313897 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.313929 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.313937 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.313949 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.313959 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.415928 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.415951 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.415959 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.415969 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.415976 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.517847 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.517882 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.517890 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.517901 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.517908 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.619522 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.619556 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.619569 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.619582 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.619590 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.721293 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.721341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.721351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.721362 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.721371 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.823045 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.823070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.823078 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.823088 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.823095 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.870164 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:00 crc kubenswrapper[4663]: E1212 14:05:00.870417 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.870640 4663 scope.go:117] "RemoveContainer" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.877481 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.879278 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506c
e0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.886109 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.894682 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.902425 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.910097 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.918054 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924129 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924394 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924402 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924414 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.924422 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:00Z","lastTransitionTime":"2025-12-12T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.930915 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.940665 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 
14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.950679 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.963339 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\
":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.971800 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.979658 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.987842 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:00 crc kubenswrapper[4663]: I1212 14:05:00.995482 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:00Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.002473 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.010921 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.026290 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.026311 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.026319 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.026332 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.026340 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.131110 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.131135 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.131143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.131154 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.131163 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.167419 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/2.log" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.169009 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.169420 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.177957 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could 
not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.184537 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.192378 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.201550 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.213003 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: 
Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.220527 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.227531 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.232976 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.233000 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.233008 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.233020 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.233027 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.235571 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.243976 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.250730 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28185d3f-2b53-40ec-b1ac-37a08f301f23\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaba706e397747a3b362494fc815bd9899db690be915307abef9830c977dbf89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.257406 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.266855 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.275783 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.285183 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.292704 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.300794 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.308274 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.319059 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:01Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.334876 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.334910 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.334919 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.334933 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.334942 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.437090 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.437133 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.437143 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.437156 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.437163 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.539038 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.539066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.539074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.539086 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.539093 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.641222 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.641440 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.641506 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.641562 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.641627 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.744044 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.744081 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.744093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.744107 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.744117 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.768577 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.768763 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.768725519 +0000 UTC m=+145.194143573 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.845932 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.845963 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.845971 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.845983 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.845991 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869197 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869280 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869206 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869347 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869289 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869359 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869373 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869207 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869397 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869419 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.869440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869462 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869383 4663 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869505 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869514 4663 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869518 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.869507093 +0000 UTC m=+145.294925147 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869522 4663 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869479 4663 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869553 4663 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869541 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.869536397 +0000 UTC m=+145.294954451 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869590 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.86957535 +0000 UTC m=+145.294993404 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: E1212 14:05:01.869615 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.86959661 +0000 UTC m=+145.295014664 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.947644 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.947674 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.947682 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.947694 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:01 crc kubenswrapper[4663]: I1212 14:05:01.947702 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:01Z","lastTransitionTime":"2025-12-12T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.049842 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.049867 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.049875 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.049886 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.049894 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.151322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.151347 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.151355 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.151364 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.151371 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.171807 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/3.log" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.172192 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/2.log" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.174154 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" exitCode=1 Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.174179 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.174208 4663 scope.go:117] "RemoveContainer" containerID="3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.174560 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.174690 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.184125 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.191881 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.198225 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sqmhz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aeda098f-80ee-41c3-b54d-fa390fd0e3ec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cba9bc6fee1c36efa261a0f1ffe9de150ed0161f312dde127af276c8c7932dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m24pq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sqmhz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.205990 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-f2n4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"262620a9-ff94-42e0-a449-6c1a022f9b77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:49Z\\\",\\\"message\\\":\\\"2025-12-12T14:04:04+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637\\\\n2025-12-12T14:04:04+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a3a79f56-aa62-48f9-80ae-52b39e13f637 to /host/opt/cni/bin/\\\\n2025-12-12T14:04:04Z [verbose] multus-daemon started\\\\n2025-12-12T14:04:04Z [verbose] Readiness Indicator file check\\\\n2025-12-12T14:04:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8gh6f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-f2n4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.214758 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e41f4560-4fe0-4b87-bb7b-a3055733abba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d243f16926a234bf7a1a3de9b8ef3ebcae632f14af98f30eb67d978038b00b92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f166a10f8ccf484fa2084b97c485e5d7d31dee217eb096725bbf52dcbe012503\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb21b4be09b2908e46b109ee2117fae85ef80d84f8029431b34a07d43b4b9ed1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5bff80dd475262c87c5a31b75ea9d50be2dfe78dc45c2550d9921844fa29a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4b77b927c70537e003aa21e5b8c53e0ce1490a79aac266f7bd2db3afe3c70e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e8d4403dd817f7e6947615282009ab5c122dba10c3bfaa3d731b06694bb9469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6d538c7a52e43e89ff2c3dd1f196ce217c5655d12f29085117554745a105938\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vr9dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kvqpx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.226022 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89a20840-03d2-434f-a5a1-3e71288c3ec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3aadd83e50b68b4cf56358e20815eb3e99cf43c7b96a71f38e2240e0399acb56\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:04:30Z\\\",\\\"message\\\":\\\"etwork=default are: map[]\\\\nI1212 14:04:30.452782 6379 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-kvqpx in node crc\\\\nI1212 14:04:30.452776 6379 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:04:30.452788 6379 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-kvqpx after 0 failed attempt(s)\\\\nF1212 14:04:30.452787 6379 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-12T14:05:01Z\\\",\\\"message\\\":\\\"Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert 
Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1212 14:05:01.450021 6852 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF1212 14:05:01.449966 6852 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:04:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:04:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6skt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rz99s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.233888 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.240778 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:00Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cd94b5ba1e467cb65c4e25d278239110c5e0077252bbf6818cc44626dacfd80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.247160 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91ce7def-b363-4fe9-8d21-b161e64d0f1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9r5mq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9t6qf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.253629 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.253662 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.253671 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.253682 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.253691 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.254794 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9eb143d-ff34-4c12-9136-1b28281e4a94\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://027fe8bd6a02eb34eb161ed744af9dacb49cbfb982772d127bbd4fc8daa7c7be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f95708c7d6fc447a90ebb9855b51cb107b3e7f604c69f486b2fc0f9be916b48\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7769d13f75754464ed54c46b49a8d99ca04ee73529d35b4e0c20ec16c383cd7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.261037 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28185d3f-2b53-40ec-b1ac-37a08f301f23\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaba706e397747a3b362494fc815bd9899db690be915307abef9830c977dbf89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fd0fc85216032afb1427ac5d00359aac77c82bce5e8c30b3707aaf75c65ce9b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.268490 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a64d3f95cc20dc3afca140b4f027f6e083eb3e937fba89612e4df5ad1b9079db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466cd03b645142059f2ce85b8d3971d026d86a09a772b7f74375f076a9894bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.276419 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"804f0066-151b-4960-aa1e-fe6346e19f98\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-12T14:03:57Z\\\",\\\"message\\\":\\\"le observer\\\\nW1212 14:03:57.615988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1212 14:03:57.616102 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1212 14:03:57.617877 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536849458/tls.crt::/tmp/serving-cert-3536849458/tls.key\\\\\\\"\\\\nI1212 14:03:57.824491 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1212 14:03:57.826664 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1212 14:03:57.826709 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1212 14:03:57.826759 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1212 14:03:57.826788 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1212 14:03:57.830213 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1212 14:03:57.830264 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1212 14:03:57.830266 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830312 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1212 14:03:57.830333 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1212 14:03:57.830351 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1212 14:03:57.830368 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1212 14:03:57.830384 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1212 14:03:57.831645 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.284192 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb72d7f8c20f16ce053d06706468c78399488e4421dc642e6ddc332c9dd035b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.290199 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-dr24h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bfcf4b6-a84c-4c32-a79e-a17bf8776478\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c4087b8c66e7683a7dae280cb2acdd3c2502f861b2fdb605ce02ad7e4ad053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9spgg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-dr24h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.296788 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74a58ec6-61d4-48ec-ad74-739c778f89b5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa1c92a004820fe0423b939bb0d5c7df2da761e4b6b5dd27770f22bd15c1b1d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdxxz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kgwm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.303528 4663 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09e108fb-f53f-46db-b289-c45cb40811f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8d9f0fc80c9919865ac604c130b0d058df5be0c50fb7060a936329d4e225b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4fea519b9a9d8828639e410b2ae2c787e12efc0b7e56f018b7ad1a5e84b35337\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s9bsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:04:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z766m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.311153 4663 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11d98651-f507-4ad4-a1ac-82bd5e81fdda\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:04:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-12T14:03:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed6cae7b6b3fb5ab83b47043020500ececd677afd6d97f1b4e739c58956a9570\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac14dc10d23224d751ac5a806650437d12559f71cefb7c40d9f5f2bf10456078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7baad0bb345278e7017947bcfe4c91c1e795d8ae159d7c44a59590f4ce233ad1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-12T14:03:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11128ee6ba1629fd7de24d9c836febd1659b78b8f9011315019d74a91a97040d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-12T14:03:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-12T14:03:41Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-12T14:03:40Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.355714 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.355760 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.355770 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.355783 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.355791 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.407518 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.407545 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.407554 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.407564 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.407571 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.415053 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.419312 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.419340 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.419349 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.419360 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.419369 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.427116 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.429540 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.429568 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.429577 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.429587 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.429607 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.437223 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.439064 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.439093 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.439102 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.439114 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.439123 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.446455 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.448573 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.448619 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.448628 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.448638 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.448647 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.455880 4663 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-12T14:05:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fd6d9db9-7d59-43ab-8347-890bc49c2a90\\\",\\\"systemUUID\\\":\\\"634273e5-e49c-4fb3-a677-4f289806af55\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-12T14:05:02Z is after 2025-08-24T17:21:41Z" Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.455979 4663 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.456791 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.456818 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.456827 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.456836 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.456844 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.558400 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.558436 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.558445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.558455 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.558462 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.660070 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.660095 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.660103 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.660111 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.660119 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.762040 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.762066 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.762074 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.762083 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.762089 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.863388 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.863415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.863423 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.863432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.863439 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.870020 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:02 crc kubenswrapper[4663]: E1212 14:05:02.870113 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.965380 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.965410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.965421 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.965432 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:02 crc kubenswrapper[4663]: I1212 14:05:02.965440 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:02Z","lastTransitionTime":"2025-12-12T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.067342 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.067373 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.067381 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.067393 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.067402 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.169167 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.169202 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.169212 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.169224 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.169234 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.177081 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/3.log" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.179372 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:05:03 crc kubenswrapper[4663]: E1212 14:05:03.179484 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.200163 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=28.200153542 podStartE2EDuration="28.200153542s" podCreationTimestamp="2025-12-12 14:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.193865833 +0000 UTC m=+82.619283887" watchObservedRunningTime="2025-12-12 14:05:03.200153542 +0000 UTC m=+82.625571596" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.207372 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-dr24h" podStartSLOduration=60.207364755 podStartE2EDuration="1m0.207364755s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.200290338 +0000 UTC m=+82.625708392" watchObservedRunningTime="2025-12-12 14:05:03.207364755 +0000 UTC m=+82.632782809" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.207459 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podStartSLOduration=60.207452869 podStartE2EDuration="1m0.207452869s" podCreationTimestamp="2025-12-12 14:04:03 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.207238729 +0000 UTC m=+82.632656783" watchObservedRunningTime="2025-12-12 14:05:03.207452869 +0000 UTC m=+82.632870933" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.223036 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z766m" podStartSLOduration=59.223028027 podStartE2EDuration="59.223028027s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.214382998 +0000 UTC m=+82.639801052" watchObservedRunningTime="2025-12-12 14:05:03.223028027 +0000 UTC m=+82.648446082" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.263818 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-f2n4r" podStartSLOduration=60.263642673 podStartE2EDuration="1m0.263642673s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.262334 +0000 UTC m=+82.687752053" watchObservedRunningTime="2025-12-12 14:05:03.263642673 +0000 UTC m=+82.689060777" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.264033 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-sqmhz" podStartSLOduration=60.264029816 podStartE2EDuration="1m0.264029816s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.252273427 +0000 UTC m=+82.677691481" watchObservedRunningTime="2025-12-12 14:05:03.264029816 +0000 UTC m=+82.689447870" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.271291 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.271323 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.271331 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.271344 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.271352 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.276171 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-kvqpx" podStartSLOduration=60.276165182 podStartE2EDuration="1m0.276165182s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.275339991 +0000 UTC m=+82.700758045" watchObservedRunningTime="2025-12-12 14:05:03.276165182 +0000 UTC m=+82.701583235" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.301396 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=64.301384395 podStartE2EDuration="1m4.301384395s" podCreationTimestamp="2025-12-12 14:03:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.300903487 +0000 UTC m=+82.726321541" watchObservedRunningTime="2025-12-12 14:05:03.301384395 +0000 UTC m=+82.726802449" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.307675 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=3.307665772 podStartE2EDuration="3.307665772s" podCreationTimestamp="2025-12-12 14:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.307286343 +0000 UTC m=+82.732704398" watchObservedRunningTime="2025-12-12 14:05:03.307665772 +0000 UTC m=+82.733083826" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.332232 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=66.332220374 podStartE2EDuration="1m6.332220374s" podCreationTimestamp="2025-12-12 14:03:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:03.323213929 +0000 UTC m=+82.748631984" watchObservedRunningTime="2025-12-12 14:05:03.332220374 +0000 UTC m=+82.757638428" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.373563 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.373603 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.373612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.373624 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.373632 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.475369 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.475533 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.475612 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.475688 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.475765 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.577410 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.577445 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.577454 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.577466 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.577476 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.679011 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.679125 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.679184 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.679251 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.679311 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.781363 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.781394 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.781404 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.781415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.781428 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.869504 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.869529 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.869535 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:03 crc kubenswrapper[4663]: E1212 14:05:03.869611 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:03 crc kubenswrapper[4663]: E1212 14:05:03.869683 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:03 crc kubenswrapper[4663]: E1212 14:05:03.869782 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.882772 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.882796 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.882805 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.882816 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.882825 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.984500 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.984528 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.984537 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.984547 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:03 crc kubenswrapper[4663]: I1212 14:05:03.984555 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.086298 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.086322 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.086330 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.086341 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.086349 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.188195 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.188220 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.188228 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.188238 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.188245 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.290510 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.290575 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.290584 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.290599 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.290608 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.392351 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.392389 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.392401 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.392415 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.392424 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.494375 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.494409 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.494418 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.494431 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.494441 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.595970 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.595996 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.596004 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.596015 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.596023 4663 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:04Z","lastTransitionTime":"2025-12-12T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... trimmed: 2 identical node-status cycles, 14:05:04.697 to 14:05:04.799 ...]
Dec 12 14:05:04 crc kubenswrapper[4663]: I1212 14:05:04.869657 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:04 crc kubenswrapper[4663]: E1212 14:05:04.869954 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
[... trimmed: 2 identical node-status cycles, 14:05:04.901 to 14:05:05.003 ...]
[... trimmed: 6 identical node-status cycles, 14:05:05.105 to 14:05:05.615 ...]
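The condition payload the kubelet prints in every "Node became not ready" record is plain JSON. As a reference for parsing these logs, a small sketch (illustrative, standard library only) that decodes one of the logged condition objects and prints its reason and message:

```go
// Illustrative sketch: parse the condition={...} JSON payload from the
// "Node became not ready" records above.
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Fields mirror the keys visible in the logged condition object.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from the 14:05:03.781428 record.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-12T14:05:03Z","lastTransitionTime":"2025-12-12T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}
```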
[... trimmed: 2 identical node-status cycles, 14:05:05.717 to 14:05:05.819 ...]
Dec 12 14:05:05 crc kubenswrapper[4663]: I1212 14:05:05.869953 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:05 crc kubenswrapper[4663]: I1212 14:05:05.870017 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:05 crc kubenswrapper[4663]: E1212 14:05:05.870049 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:05 crc kubenswrapper[4663]: I1212 14:05:05.869953 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:05 crc kubenswrapper[4663]: E1212 14:05:05.870102 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:05 crc kubenswrapper[4663]: E1212 14:05:05.870225 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... trimmed: 2 identical node-status cycles, 14:05:05.921 to 14:05:06.023 ...]
[... trimmed: 6 identical node-status cycles, 14:05:06.124 to 14:05:06.633 ...]
[... trimmed: 2 identical node-status cycles, 14:05:06.735 to 14:05:06.837 ...]
Dec 12 14:05:06 crc kubenswrapper[4663]: I1212 14:05:06.870013 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:06 crc kubenswrapper[4663]: E1212 14:05:06.870115 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
[... trimmed: 2 identical node-status cycles, 14:05:06.939 to 14:05:07.042 ...]
[... trimmed: 6 identical node-status cycles, 14:05:07.144 to 14:05:07.654 ...]
[... trimmed: 2 identical node-status cycles, 14:05:07.755 to 14:05:07.857 ...]
Dec 12 14:05:07 crc kubenswrapper[4663]: I1212 14:05:07.869604 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:07 crc kubenswrapper[4663]: I1212 14:05:07.869605 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:07 crc kubenswrapper[4663]: I1212 14:05:07.869667 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:07 crc kubenswrapper[4663]: E1212 14:05:07.869790 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:07 crc kubenswrapper[4663]: E1212 14:05:07.869841 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:07 crc kubenswrapper[4663]: E1212 14:05:07.869891 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... trimmed: 2 identical node-status cycles, 14:05:07.959 to 14:05:08.061 ...]
[... trimmed: 6 identical node-status cycles, 14:05:08.163 to 14:05:08.672 ...]
Dec 12 14:05:08 crc kubenswrapper[4663]: I1212 14:05:08.869390 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:08 crc kubenswrapper[4663]: E1212 14:05:08.869613 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:09 crc kubenswrapper[4663]: I1212 14:05:09.869640 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:09 crc kubenswrapper[4663]: I1212 14:05:09.869663 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:09 crc kubenswrapper[4663]: E1212 14:05:09.869736 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:09 crc kubenswrapper[4663]: I1212 14:05:09.869781 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:09 crc kubenswrapper[4663]: E1212 14:05:09.869826 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:09 crc kubenswrapper[4663]: E1212 14:05:09.869879 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:10 crc kubenswrapper[4663]: I1212 14:05:10.869781 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:10 crc kubenswrapper[4663]: E1212 14:05:10.870464 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:11 crc kubenswrapper[4663]: I1212 14:05:11.869113 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:11 crc kubenswrapper[4663]: I1212 14:05:11.869126 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:11 crc kubenswrapper[4663]: I1212 14:05:11.869189 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:11 crc kubenswrapper[4663]: E1212 14:05:11.869268 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:11 crc kubenswrapper[4663]: E1212 14:05:11.869331 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:11 crc kubenswrapper[4663]: E1212 14:05:11.869429 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.535312 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"]
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.535623 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.536844 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.537696 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.537696 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.538623 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.655812 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.655860 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c049311f-efb2-4d9a-9905-ce2176034b88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.655898 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c049311f-efb2-4d9a-9905-ce2176034b88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.655929 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.655948 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c049311f-efb2-4d9a-9905-ce2176034b88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756372 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c049311f-efb2-4d9a-9905-ce2176034b88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756404 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c049311f-efb2-4d9a-9905-ce2176034b88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756462 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756499 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c049311f-efb2-4d9a-9905-ce2176034b88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756550 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.756612 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/c049311f-efb2-4d9a-9905-ce2176034b88-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.757303 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c049311f-efb2-4d9a-9905-ce2176034b88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.762480 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c049311f-efb2-4d9a-9905-ce2176034b88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.771401 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c049311f-efb2-4d9a-9905-ce2176034b88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-v9tnp\" (UID: \"c049311f-efb2-4d9a-9905-ce2176034b88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.845311 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp"
Dec 12 14:05:12 crc kubenswrapper[4663]: I1212 14:05:12.870046 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:12 crc kubenswrapper[4663]: E1212 14:05:12.870175 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.205727 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp" event={"ID":"c049311f-efb2-4d9a-9905-ce2176034b88","Type":"ContainerStarted","Data":"b8d3879abd2912a7876cf65d5864906ad00233d668ac52a5bea666396e0ed6db"}
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.205800 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp" event={"ID":"c049311f-efb2-4d9a-9905-ce2176034b88","Type":"ContainerStarted","Data":"f76cb66833084e91e37d3e61b2bd1c968ae60502c0530f70bb815416e232ff35"}
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.218696 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-v9tnp" podStartSLOduration=70.218681292 podStartE2EDuration="1m10.218681292s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:13.218670472 +0000 UTC m=+92.644088536" watchObservedRunningTime="2025-12-12 14:05:13.218681292 +0000 UTC m=+92.644099346"
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.869271 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.869321 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:13 crc kubenswrapper[4663]: I1212 14:05:13.869374 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:13 crc kubenswrapper[4663]: E1212 14:05:13.869394 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:13 crc kubenswrapper[4663]: E1212 14:05:13.869524 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:13 crc kubenswrapper[4663]: E1212 14:05:13.869583 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:14 crc kubenswrapper[4663]: I1212 14:05:14.869485 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:14 crc kubenswrapper[4663]: E1212 14:05:14.869613 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:14 crc kubenswrapper[4663]: I1212 14:05:14.870826 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"
Dec 12 14:05:14 crc kubenswrapper[4663]: E1212 14:05:14.871065 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5"
Dec 12 14:05:15 crc kubenswrapper[4663]: I1212 14:05:15.870077 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:15 crc kubenswrapper[4663]: E1212 14:05:15.870185 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:15 crc kubenswrapper[4663]: I1212 14:05:15.870901 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:15 crc kubenswrapper[4663]: I1212 14:05:15.870955 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:15 crc kubenswrapper[4663]: E1212 14:05:15.871023 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:15 crc kubenswrapper[4663]: E1212 14:05:15.871138 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:16 crc kubenswrapper[4663]: I1212 14:05:16.869771 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:16 crc kubenswrapper[4663]: E1212 14:05:16.869901 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:17 crc kubenswrapper[4663]: I1212 14:05:17.869040 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:17 crc kubenswrapper[4663]: I1212 14:05:17.869075 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:17 crc kubenswrapper[4663]: I1212 14:05:17.869081 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:17 crc kubenswrapper[4663]: E1212 14:05:17.869133 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:17 crc kubenswrapper[4663]: E1212 14:05:17.869225 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:17 crc kubenswrapper[4663]: E1212 14:05:17.869293 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:18 crc kubenswrapper[4663]: I1212 14:05:18.869324 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:18 crc kubenswrapper[4663]: E1212 14:05:18.869438 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:19 crc kubenswrapper[4663]: I1212 14:05:19.869660 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:19 crc kubenswrapper[4663]: I1212 14:05:19.869662 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:19 crc kubenswrapper[4663]: I1212 14:05:19.870220 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:19 crc kubenswrapper[4663]: E1212 14:05:19.870378 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:19 crc kubenswrapper[4663]: E1212 14:05:19.870434 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:19 crc kubenswrapper[4663]: E1212 14:05:19.870487 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:20 crc kubenswrapper[4663]: I1212 14:05:20.869927 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:20 crc kubenswrapper[4663]: E1212 14:05:20.870696 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:21 crc kubenswrapper[4663]: I1212 14:05:21.034187 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:21 crc kubenswrapper[4663]: E1212 14:05:21.034337 4663 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:05:21 crc kubenswrapper[4663]: E1212 14:05:21.034391 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs podName:91ce7def-b363-4fe9-8d21-b161e64d0f1c nodeName:}" failed. No retries permitted until 2025-12-12 14:06:25.034375876 +0000 UTC m=+164.459793930 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs") pod "network-metrics-daemon-9t6qf" (UID: "91ce7def-b363-4fe9-8d21-b161e64d0f1c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 12 14:05:21 crc kubenswrapper[4663]: I1212 14:05:21.869211 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:21 crc kubenswrapper[4663]: I1212 14:05:21.869457 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:21 crc kubenswrapper[4663]: I1212 14:05:21.869463 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:21 crc kubenswrapper[4663]: E1212 14:05:21.869620 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:21 crc kubenswrapper[4663]: E1212 14:05:21.869843 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:21 crc kubenswrapper[4663]: E1212 14:05:21.870079 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:22 crc kubenswrapper[4663]: I1212 14:05:22.869407 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:22 crc kubenswrapper[4663]: E1212 14:05:22.869513 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:23 crc kubenswrapper[4663]: I1212 14:05:23.869846 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:23 crc kubenswrapper[4663]: I1212 14:05:23.869872 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:23 crc kubenswrapper[4663]: E1212 14:05:23.869945 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:23 crc kubenswrapper[4663]: I1212 14:05:23.870070 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:23 crc kubenswrapper[4663]: E1212 14:05:23.870114 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:23 crc kubenswrapper[4663]: E1212 14:05:23.870469 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:23 crc kubenswrapper[4663]: I1212 14:05:23.880356 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 12 14:05:24 crc kubenswrapper[4663]: I1212 14:05:24.869769 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:24 crc kubenswrapper[4663]: E1212 14:05:24.869870 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:25 crc kubenswrapper[4663]: I1212 14:05:25.869868 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:25 crc kubenswrapper[4663]: I1212 14:05:25.870080 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:25 crc kubenswrapper[4663]: I1212 14:05:25.869928 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:25 crc kubenswrapper[4663]: E1212 14:05:25.870591 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:25 crc kubenswrapper[4663]: E1212 14:05:25.870762 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:25 crc kubenswrapper[4663]: E1212 14:05:25.870845 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:26 crc kubenswrapper[4663]: I1212 14:05:26.869226 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:26 crc kubenswrapper[4663]: E1212 14:05:26.869348 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:27 crc kubenswrapper[4663]: I1212 14:05:27.869434 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:27 crc kubenswrapper[4663]: I1212 14:05:27.869459 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:27 crc kubenswrapper[4663]: I1212 14:05:27.869489 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:27 crc kubenswrapper[4663]: E1212 14:05:27.869611 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:27 crc kubenswrapper[4663]: E1212 14:05:27.869661 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:27 crc kubenswrapper[4663]: E1212 14:05:27.869708 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:27 crc kubenswrapper[4663]: I1212 14:05:27.870117 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:05:27 crc kubenswrapper[4663]: E1212 14:05:27.870238 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:05:28 crc kubenswrapper[4663]: I1212 14:05:28.869658 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:28 crc kubenswrapper[4663]: E1212 14:05:28.869766 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:29 crc kubenswrapper[4663]: I1212 14:05:29.869391 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:29 crc kubenswrapper[4663]: I1212 14:05:29.869420 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:29 crc kubenswrapper[4663]: E1212 14:05:29.869479 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:29 crc kubenswrapper[4663]: I1212 14:05:29.869399 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:29 crc kubenswrapper[4663]: E1212 14:05:29.869628 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:29 crc kubenswrapper[4663]: E1212 14:05:29.869813 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:30 crc kubenswrapper[4663]: I1212 14:05:30.869790 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:30 crc kubenswrapper[4663]: E1212 14:05:30.870795 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:30 crc kubenswrapper[4663]: I1212 14:05:30.888045 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=7.888032574 podStartE2EDuration="7.888032574s" podCreationTimestamp="2025-12-12 14:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:30.887608611 +0000 UTC m=+110.313026666" watchObservedRunningTime="2025-12-12 14:05:30.888032574 +0000 UTC m=+110.313450627" Dec 12 14:05:31 crc kubenswrapper[4663]: I1212 14:05:31.869246 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:31 crc kubenswrapper[4663]: I1212 14:05:31.869287 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:31 crc kubenswrapper[4663]: E1212 14:05:31.869329 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:31 crc kubenswrapper[4663]: I1212 14:05:31.869246 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:31 crc kubenswrapper[4663]: E1212 14:05:31.869467 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:31 crc kubenswrapper[4663]: E1212 14:05:31.869499 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:32 crc kubenswrapper[4663]: I1212 14:05:32.869086 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:32 crc kubenswrapper[4663]: E1212 14:05:32.869198 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:33 crc kubenswrapper[4663]: I1212 14:05:33.869458 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:33 crc kubenswrapper[4663]: I1212 14:05:33.869477 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:33 crc kubenswrapper[4663]: I1212 14:05:33.869496 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:33 crc kubenswrapper[4663]: E1212 14:05:33.869568 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:33 crc kubenswrapper[4663]: E1212 14:05:33.869616 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:33 crc kubenswrapper[4663]: E1212 14:05:33.869820 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:34 crc kubenswrapper[4663]: I1212 14:05:34.869278 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:34 crc kubenswrapper[4663]: E1212 14:05:34.869410 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:35 crc kubenswrapper[4663]: I1212 14:05:35.869871 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:35 crc kubenswrapper[4663]: I1212 14:05:35.869898 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:35 crc kubenswrapper[4663]: I1212 14:05:35.869904 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:35 crc kubenswrapper[4663]: E1212 14:05:35.870004 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:35 crc kubenswrapper[4663]: E1212 14:05:35.870134 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:35 crc kubenswrapper[4663]: E1212 14:05:35.870199 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.258793 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/1.log" Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.259141 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/0.log" Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.259175 4663 generic.go:334] "Generic (PLEG): container finished" podID="262620a9-ff94-42e0-a449-6c1a022f9b77" containerID="f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09" exitCode=1 Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.259198 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerDied","Data":"f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09"} Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.259225 4663 scope.go:117] "RemoveContainer" containerID="8478ba41ee78980c7ad92ac44b10155c16d86c8f4a04854f4b5ba6c4d24f03cb" Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.259615 4663 scope.go:117] "RemoveContainer" containerID="f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09" Dec 12 14:05:36 crc kubenswrapper[4663]: E1212 14:05:36.259786 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-f2n4r_openshift-multus(262620a9-ff94-42e0-a449-6c1a022f9b77)\"" pod="openshift-multus/multus-f2n4r" podUID="262620a9-ff94-42e0-a449-6c1a022f9b77" Dec 12 14:05:36 crc kubenswrapper[4663]: I1212 14:05:36.869789 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:36 crc kubenswrapper[4663]: E1212 14:05:36.869899 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:37 crc kubenswrapper[4663]: I1212 14:05:37.261830 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/1.log" Dec 12 14:05:37 crc kubenswrapper[4663]: I1212 14:05:37.869885 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:37 crc kubenswrapper[4663]: E1212 14:05:37.869972 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:37 crc kubenswrapper[4663]: I1212 14:05:37.870043 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:37 crc kubenswrapper[4663]: E1212 14:05:37.870165 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:37 crc kubenswrapper[4663]: I1212 14:05:37.870596 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:37 crc kubenswrapper[4663]: E1212 14:05:37.870706 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:38 crc kubenswrapper[4663]: I1212 14:05:38.869787 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:38 crc kubenswrapper[4663]: E1212 14:05:38.869877 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:38 crc kubenswrapper[4663]: I1212 14:05:38.870412 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:05:38 crc kubenswrapper[4663]: E1212 14:05:38.870548 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rz99s_openshift-ovn-kubernetes(89a20840-03d2-434f-a5a1-3e71288c3ec5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" Dec 12 14:05:39 crc kubenswrapper[4663]: I1212 14:05:39.869525 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:39 crc kubenswrapper[4663]: I1212 14:05:39.869587 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:39 crc kubenswrapper[4663]: E1212 14:05:39.869634 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:39 crc kubenswrapper[4663]: I1212 14:05:39.869667 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:39 crc kubenswrapper[4663]: E1212 14:05:39.869793 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:39 crc kubenswrapper[4663]: E1212 14:05:39.869911 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:40 crc kubenswrapper[4663]: I1212 14:05:40.869181 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:40 crc kubenswrapper[4663]: E1212 14:05:40.870012 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:40 crc kubenswrapper[4663]: E1212 14:05:40.907644 4663 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 12 14:05:40 crc kubenswrapper[4663]: E1212 14:05:40.942066 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 12 14:05:41 crc kubenswrapper[4663]: I1212 14:05:41.869288 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:41 crc kubenswrapper[4663]: I1212 14:05:41.869339 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:41 crc kubenswrapper[4663]: I1212 14:05:41.869380 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:41 crc kubenswrapper[4663]: E1212 14:05:41.869405 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:41 crc kubenswrapper[4663]: E1212 14:05:41.869477 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:41 crc kubenswrapper[4663]: E1212 14:05:41.869572 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:42 crc kubenswrapper[4663]: I1212 14:05:42.869148 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:42 crc kubenswrapper[4663]: E1212 14:05:42.869267 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:43 crc kubenswrapper[4663]: I1212 14:05:43.869223 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:43 crc kubenswrapper[4663]: I1212 14:05:43.869293 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:43 crc kubenswrapper[4663]: E1212 14:05:43.869324 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:43 crc kubenswrapper[4663]: I1212 14:05:43.869223 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:43 crc kubenswrapper[4663]: E1212 14:05:43.869398 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:43 crc kubenswrapper[4663]: E1212 14:05:43.869466 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:44 crc kubenswrapper[4663]: I1212 14:05:44.869834 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:44 crc kubenswrapper[4663]: E1212 14:05:44.870026 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:45 crc kubenswrapper[4663]: I1212 14:05:45.869999 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:45 crc kubenswrapper[4663]: I1212 14:05:45.870013 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:45 crc kubenswrapper[4663]: I1212 14:05:45.870176 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:45 crc kubenswrapper[4663]: E1212 14:05:45.870110 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:45 crc kubenswrapper[4663]: E1212 14:05:45.870233 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:45 crc kubenswrapper[4663]: E1212 14:05:45.870277 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:45 crc kubenswrapper[4663]: E1212 14:05:45.942728 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 12 14:05:46 crc kubenswrapper[4663]: I1212 14:05:46.870027 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:46 crc kubenswrapper[4663]: E1212 14:05:46.870130 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:47 crc kubenswrapper[4663]: I1212 14:05:47.869525 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:05:47 crc kubenswrapper[4663]: I1212 14:05:47.869602 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:47 crc kubenswrapper[4663]: E1212 14:05:47.869624 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 12 14:05:47 crc kubenswrapper[4663]: I1212 14:05:47.869525 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:47 crc kubenswrapper[4663]: E1212 14:05:47.869703 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 12 14:05:47 crc kubenswrapper[4663]: E1212 14:05:47.869786 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:48 crc kubenswrapper[4663]: I1212 14:05:48.870089 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:05:48 crc kubenswrapper[4663]: E1212 14:05:48.870199 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c" Dec 12 14:05:48 crc kubenswrapper[4663]: I1212 14:05:48.870282 4663 scope.go:117] "RemoveContainer" containerID="f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09" Dec 12 14:05:49 crc kubenswrapper[4663]: I1212 14:05:49.290567 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/1.log" Dec 12 14:05:49 crc kubenswrapper[4663]: I1212 14:05:49.290806 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerStarted","Data":"944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53"} Dec 12 14:05:49 crc kubenswrapper[4663]: I1212 14:05:49.869711 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:05:49 crc kubenswrapper[4663]: I1212 14:05:49.869822 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:05:49 crc kubenswrapper[4663]: E1212 14:05:49.869848 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 12 14:05:49 crc kubenswrapper[4663]: I1212 14:05:49.869728 4663 util.go:30] "No sandbox for pod can be found. 
Dec 12 14:05:49 crc kubenswrapper[4663]: E1212 14:05:49.869940 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:49 crc kubenswrapper[4663]: E1212 14:05:49.870212 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:50 crc kubenswrapper[4663]: I1212 14:05:50.868984 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:50 crc kubenswrapper[4663]: E1212 14:05:50.871276 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:50 crc kubenswrapper[4663]: E1212 14:05:50.943282 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 12 14:05:51 crc kubenswrapper[4663]: I1212 14:05:51.869311 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:51 crc kubenswrapper[4663]: I1212 14:05:51.869349 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:51 crc kubenswrapper[4663]: I1212 14:05:51.869378 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:51 crc kubenswrapper[4663]: E1212 14:05:51.869450 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:51 crc kubenswrapper[4663]: E1212 14:05:51.869522 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:51 crc kubenswrapper[4663]: E1212 14:05:51.869640 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:52 crc kubenswrapper[4663]: I1212 14:05:52.870037 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:52 crc kubenswrapper[4663]: E1212 14:05:52.870179 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:53 crc kubenswrapper[4663]: I1212 14:05:53.869590 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:53 crc kubenswrapper[4663]: I1212 14:05:53.869654 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:53 crc kubenswrapper[4663]: E1212 14:05:53.869711 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:53 crc kubenswrapper[4663]: E1212 14:05:53.869819 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:53 crc kubenswrapper[4663]: I1212 14:05:53.869856 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:53 crc kubenswrapper[4663]: E1212 14:05:53.869920 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:53 crc kubenswrapper[4663]: I1212 14:05:53.870898 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.307531 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/3.log"
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.309675 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerStarted","Data":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"}
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.310038 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.329395 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podStartSLOduration=111.329381701 podStartE2EDuration="1m51.329381701s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:05:54.328492106 +0000 UTC m=+133.753910160" watchObservedRunningTime="2025-12-12 14:05:54.329381701 +0000 UTC m=+133.754799755"
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.517628 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9t6qf"]
Dec 12 14:05:54 crc kubenswrapper[4663]: I1212 14:05:54.517776 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:54 crc kubenswrapper[4663]: E1212 14:05:54.517857 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:55 crc kubenswrapper[4663]: I1212 14:05:55.870034 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:55 crc kubenswrapper[4663]: I1212 14:05:55.870049 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:55 crc kubenswrapper[4663]: E1212 14:05:55.870361 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:55 crc kubenswrapper[4663]: I1212 14:05:55.870076 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:55 crc kubenswrapper[4663]: E1212 14:05:55.870399 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:55 crc kubenswrapper[4663]: E1212 14:05:55.870437 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:55 crc kubenswrapper[4663]: I1212 14:05:55.870064 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:55 crc kubenswrapper[4663]: E1212 14:05:55.870787 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:55 crc kubenswrapper[4663]: E1212 14:05:55.944854 4663 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 12 14:05:57 crc kubenswrapper[4663]: I1212 14:05:57.869910 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:57 crc kubenswrapper[4663]: E1212 14:05:57.870307 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:57 crc kubenswrapper[4663]: I1212 14:05:57.870340 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:57 crc kubenswrapper[4663]: I1212 14:05:57.870373 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:57 crc kubenswrapper[4663]: I1212 14:05:57.870413 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:57 crc kubenswrapper[4663]: E1212 14:05:57.870489 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:57 crc kubenswrapper[4663]: E1212 14:05:57.870660 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:05:57 crc kubenswrapper[4663]: E1212 14:05:57.870810 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:59 crc kubenswrapper[4663]: I1212 14:05:59.869671 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:05:59 crc kubenswrapper[4663]: I1212 14:05:59.869693 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:05:59 crc kubenswrapper[4663]: I1212 14:05:59.869731 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:05:59 crc kubenswrapper[4663]: E1212 14:05:59.869803 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 12 14:05:59 crc kubenswrapper[4663]: I1212 14:05:59.869895 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:05:59 crc kubenswrapper[4663]: E1212 14:05:59.869978 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 12 14:05:59 crc kubenswrapper[4663]: E1212 14:05:59.870052 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9t6qf" podUID="91ce7def-b363-4fe9-8d21-b161e64d0f1c"
Dec 12 14:05:59 crc kubenswrapper[4663]: E1212 14:05:59.870137 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.869490 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.869538 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.869774 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.870134 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.871143 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.871331 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.871557 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.872209 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.872522 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 12 14:06:01 crc kubenswrapper[4663]: I1212 14:06:01.872677 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.891871 4663 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.912609 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.913128 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.913576 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.913995 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.914419 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.914714 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.915200 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.915640 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.915937 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.916147 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.916680 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.917077 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.921397 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.923872 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.927553 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.927647 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.927697 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.927729 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.927587 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.928612 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.928882 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.929491 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.929557 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.929667 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.929818 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.929684 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.930041 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.930136 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.930232 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.930359 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.930857 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932354 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932388 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932467 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932581 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932608 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932639 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932719 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932831 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.932960 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.933293 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.933473 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.933642 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.934042 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.934294 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.934676 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.934831 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cvblj"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.935293 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.941902 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.941919 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.941938 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.941994 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.942034 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.942002 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.942144 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.942442 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.945709 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.946176 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.946309 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.947022 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.947075 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.947155 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.947163 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.947459 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.948394 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9hr59"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.949205 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9hr59"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.950969 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.951358 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.951570 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.951805 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.951960 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.952520 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.952844 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953056 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953179 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953304 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953436 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953493 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.953657 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.954517 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.966072 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.966585 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.966718 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.966799 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.967130 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.967698 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.967845 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.968397 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4fxpg"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.968940 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.969314 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f4vfm"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.969809 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4fxpg"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.974134 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.974572 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gtwt4"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.974814 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.974955 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.975255 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.975450 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.976940 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.977045 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.977251 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.977401 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.978952 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979056 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979071 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979077 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979171 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979253 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979343 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979708 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979936 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.980008 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.980040 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.979791 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
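
The run of "Caches populated" entries above marks client-go reflectors finishing their initial List+Watch for each ConfigMap and Secret the newly added pods mount; the volume processing that follows works against these synced informer caches. The same wait, expressed with the public client-go primitives (a sketch only; the kubeconfig path is an assumption, and kubelet wires this up internally rather than like this):

    package main

    import (
    	"fmt"

    	"k8s.io/client-go/informers"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/cache"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig") // assumed path
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	stop := make(chan struct{})
    	defer close(stop)
    	f := informers.NewSharedInformerFactory(cs, 0)
    	cm := f.Core().V1().ConfigMaps().Informer()
    	sec := f.Core().V1().Secrets().Informer()
    	f.Start(stop)
    	// Blocks until the initial List of each resource sits in the local
    	// cache, which is the moment the "Caches populated" lines record.
    	if !cache.WaitForCacheSync(stop, cm.HasSynced, sec.HasSynced) {
    		panic("caches failed to sync")
    	}
    	fmt.Println("caches populated")
    }
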
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.982825 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.984361 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.985001 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992014 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992083 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-config\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992116 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfzmv\" (UniqueName: \"kubernetes.io/projected/a39261d0-7d6f-4f3a-b741-e476ea306d9a-kube-api-access-cfzmv\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992160 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-service-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992190 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9b79bab-ef9e-4304-a599-90d56a93b228-serving-cert\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992213 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992255 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992273 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-client\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992309 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992350 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsbp6\" (UniqueName: \"kubernetes.io/projected/fcf24979-86e7-4a32-af0b-458a7d4df2c2-kube-api-access-zsbp6\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992398 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39261d0-7d6f-4f3a-b741-e476ea306d9a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992423 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992473 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-images\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992496 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/317dd582-032f-45be-ab3d-30f199ac2261-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992515 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-config\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992569 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992624 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/990d6886-87e6-4d23-9d63-95c2e2db5525-machine-approver-tls\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992654 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992671 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-encryption-config\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992703 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-dir\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992728 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992775 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27da8075-7ceb-4be8-aac2-db2a24030a24-serving-cert\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992792 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992812 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992849 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992871 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-serving-cert\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992892 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992931 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmwt4\" (UniqueName: \"kubernetes.io/projected/27da8075-7ceb-4be8-aac2-db2a24030a24-kube-api-access-tmwt4\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.992959 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993006 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qglbb\" (UniqueName: \"kubernetes.io/projected/990d6886-87e6-4d23-9d63-95c2e2db5525-kube-api-access-qglbb\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993031 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g9kd\" (UniqueName: \"kubernetes.io/projected/d9b79bab-ef9e-4304-a599-90d56a93b228-kube-api-access-9g9kd\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993050 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9dbb\" (UniqueName: \"kubernetes.io/projected/317dd582-032f-45be-ab3d-30f199ac2261-kube-api-access-r9dbb\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993088 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993110 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993130 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxgm4\" (UniqueName: \"kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993167 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d9b79bab-ef9e-4304-a599-90d56a93b228-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993198 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39261d0-7d6f-4f3a-b741-e476ea306d9a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993236 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993256 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-policies\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993278 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-auth-proxy-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.993317 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.994140 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.994880 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.995668 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.996081 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-ddcb6"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.996244 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.996613 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt"]
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.996941 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.996987 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-ddcb6"
Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.997314 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt"
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:03 crc kubenswrapper[4663]: I1212 14:06:03.999615 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:03.999883 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:03.999909 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.000612 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.000820 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001348 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001489 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001608 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001757 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001762 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.001853 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.002023 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.002124 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.009292 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.009414 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.009485 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.009554 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.009928 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.010084 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.010586 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.010699 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.012686 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.013258 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.013321 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.013815 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.015190 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.015585 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.015870 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.016025 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.016183 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.016246 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.016188 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.016609 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j724k"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.017071 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.018290 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.021896 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.022477 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.022607 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.022977 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.023364 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.023939 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.024257 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.024580 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.025266 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zzw52"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.025736 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.027697 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.028611 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.028676 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t5mzq"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.029228 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.029524 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.030916 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.033430 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.034165 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.034469 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.034641 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.034670 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.035079 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.035214 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.035362 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.037955 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.039082 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.039867 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.041272 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.043148 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.044950 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.045947 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qttz2"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.046830 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.048805 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gtwt4"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.048844 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.049353 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.050609 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.051392 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.052144 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.053128 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cvblj"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.053882 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.055451 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.059975 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4fxpg"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.062090 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.063025 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.063960 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.064791 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.065683 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.066505 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.067363 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.068199 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q"] Dec 12 14:06:04 crc kubenswrapper[4663]: 
I1212 14:06:04.069022 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.069836 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.070671 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9hr59"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.071493 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.072431 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.073135 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.073942 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zzw52"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.074724 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j724k"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.075091 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.075538 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qttz2"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.076381 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t5mzq"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.077194 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-7krfn"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.077768 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.077985 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-t4pt8"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.078568 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.078781 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7krfn"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.079553 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t4pt8"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093712 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093743 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093792 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093811 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-client\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093840 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093856 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rvmv\" (UniqueName: \"kubernetes.io/projected/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-kube-api-access-9rvmv\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093883 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsbp6\" (UniqueName: \"kubernetes.io/projected/fcf24979-86e7-4a32-af0b-458a7d4df2c2-kube-api-access-zsbp6\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093899 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093915 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwhzm\" (UniqueName: \"kubernetes.io/projected/427e7a1a-b751-4226-9c17-7a8b9226ccc9-kube-api-access-rwhzm\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093931 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093945 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43fb274b-4188-4db5-a4f0-f7c18bca433e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093964 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093981 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39261d0-7d6f-4f3a-b741-e476ea306d9a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.093996 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094015 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x47zj\" (UniqueName: \"kubernetes.io/projected/823b075c-2bee-4823-b938-afabe4af612a-kube-api-access-x47zj\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094031 4663 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg55n\" (UniqueName: \"kubernetes.io/projected/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-kube-api-access-tg55n\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094053 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-srv-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094069 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-images\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094084 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/317dd582-032f-45be-ab3d-30f199ac2261-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094103 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-config\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094120 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094134 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094148 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk887\" (UniqueName: \"kubernetes.io/projected/53b84a0d-9a65-471b-af63-19eec862c572-kube-api-access-rk887\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094163 4663 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094186 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qtkn\" (UniqueName: \"kubernetes.io/projected/ddc512fd-9f37-4257-b51b-0d2debe5df0b-kube-api-access-7qtkn\") pod \"migrator-59844c95c7-qrzws\" (UID: \"ddc512fd-9f37-4257-b51b-0d2debe5df0b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094199 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094217 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094234 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d79nl\" (UniqueName: \"kubernetes.io/projected/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-kube-api-access-d79nl\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094251 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-trusted-ca\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094264 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-metrics-tls\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094280 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-profile-collector-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 
crc kubenswrapper[4663]: I1212 14:06:04.094295 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/823b075c-2bee-4823-b938-afabe4af612a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094327 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-metrics-certs\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094375 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094398 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/990d6886-87e6-4d23-9d63-95c2e2db5525-machine-approver-tls\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094441 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094473 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-encryption-config\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094491 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6553ea68-1d18-4953-8e43-09d47f2745fc-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094510 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc4gt\" (UniqueName: \"kubernetes.io/projected/6553ea68-1d18-4953-8e43-09d47f2745fc-kube-api-access-tc4gt\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" 
Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094523 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094543 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-dir\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094559 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn2d6\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-kube-api-access-cn2d6\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094576 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094607 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whzjf\" (UniqueName: \"kubernetes.io/projected/7b18a264-5694-4065-92fb-a41b3f231a79-kube-api-access-whzjf\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094624 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27da8075-7ceb-4be8-aac2-db2a24030a24-serving-cert\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094641 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnfmz\" (UniqueName: \"kubernetes.io/projected/adfbdf33-01b1-454d-bb33-b32818244bc2-kube-api-access-gnfmz\") pod \"downloads-7954f5f757-4fxpg\" (UID: \"adfbdf33-01b1-454d-bb33-b32818244bc2\") " pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094658 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094675 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094690 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094707 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-serving-cert\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094724 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b18a264-5694-4065-92fb-a41b3f231a79-serving-cert\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094757 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-default-certificate\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094776 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43fb274b-4188-4db5-a4f0-f7c18bca433e-config\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094793 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094814 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdcb9400-6fea-46b5-ac30-975a690d67c5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094831 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094849 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmwt4\" (UniqueName: \"kubernetes.io/projected/27da8075-7ceb-4be8-aac2-db2a24030a24-kube-api-access-tmwt4\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094865 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094882 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094897 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094923 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094940 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-metrics-tls\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094955 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg9qg\" (UniqueName: \"kubernetes.io/projected/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-kube-api-access-dg9qg\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094973 4663 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-qglbb\" (UniqueName: \"kubernetes.io/projected/990d6886-87e6-4d23-9d63-95c2e2db5525-kube-api-access-qglbb\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094991 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g9kd\" (UniqueName: \"kubernetes.io/projected/d9b79bab-ef9e-4304-a599-90d56a93b228-kube-api-access-9g9kd\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095010 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9dbb\" (UniqueName: \"kubernetes.io/projected/317dd582-032f-45be-ab3d-30f199ac2261-kube-api-access-r9dbb\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095026 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095047 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095062 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095080 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095085 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095096 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9x56\" (UniqueName: \"kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56\") pod \"console-f9d7485db-f4vfm\" 
(UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095113 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/427e7a1a-b751-4226-9c17-7a8b9226ccc9-service-ca-bundle\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095132 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39261d0-7d6f-4f3a-b741-e476ea306d9a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095148 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095147 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-config\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095164 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxgm4\" (UniqueName: \"kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d9b79bab-ef9e-4304-a599-90d56a93b228-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095231 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x2cq\" (UniqueName: \"kubernetes.io/projected/f4ac5e59-3c55-428e-bb44-d96db06fdc78-kube-api-access-6x2cq\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.094726 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095798 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca\") 
pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095817 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095853 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095877 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbh55\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-kube-api-access-jbh55\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095913 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-auth-proxy-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095934 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-policies\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095953 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7stk6\" (UniqueName: \"kubernetes.io/projected/fdcb9400-6fea-46b5-ac30-975a690d67c5-kube-api-access-7stk6\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.095973 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-stats-auth\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096001 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096015 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-images\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096021 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-config\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096063 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096086 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/823b075c-2bee-4823-b938-afabe4af612a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096109 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfzmv\" (UniqueName: \"kubernetes.io/projected/a39261d0-7d6f-4f3a-b741-e476ea306d9a-kube-api-access-cfzmv\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096127 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-config\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096144 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-trusted-ca\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096161 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43fb274b-4188-4db5-a4f0-f7c18bca433e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: 
\"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096181 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-service-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096200 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9b79bab-ef9e-4304-a599-90d56a93b228-serving-cert\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096219 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096236 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096527 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d9b79bab-ef9e-4304-a599-90d56a93b228-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096574 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/990d6886-87e6-4d23-9d63-95c2e2db5525-auth-proxy-config\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.096973 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-service-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.097157 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-policies\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.098363 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.098382 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.098793 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.098798 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-client\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.098962 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/990d6886-87e6-4d23-9d63-95c2e2db5525-machine-approver-tls\") pod \"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.099012 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/317dd582-032f-45be-ab3d-30f199ac2261-config\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.099224 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.099511 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.099607 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.099763 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/27da8075-7ceb-4be8-aac2-db2a24030a24-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100101 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100103 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100304 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100351 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcf24979-86e7-4a32-af0b-458a7d4df2c2-audit-dir\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100375 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100478 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9b79bab-ef9e-4304-a599-90d56a93b228-serving-cert\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100538 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a39261d0-7d6f-4f3a-b741-e476ea306d9a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: 
\"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100552 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100644 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a39261d0-7d6f-4f3a-b741-e476ea306d9a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.100924 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcf24979-86e7-4a32-af0b-458a7d4df2c2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.101035 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/317dd582-032f-45be-ab3d-30f199ac2261-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.101109 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27da8075-7ceb-4be8-aac2-db2a24030a24-serving-cert\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.101553 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.102545 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-serving-cert\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.102647 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.102830 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcf24979-86e7-4a32-af0b-458a7d4df2c2-encryption-config\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.128375 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-zxqd4"] Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.129043 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.134814 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.154693 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.174844 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.195051 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196853 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43fb274b-4188-4db5-a4f0-f7c18bca433e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196882 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196903 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196920 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x47zj\" (UniqueName: \"kubernetes.io/projected/823b075c-2bee-4823-b938-afabe4af612a-kube-api-access-x47zj\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196949 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg55n\" (UniqueName: \"kubernetes.io/projected/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-kube-api-access-tg55n\") 
pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196966 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-srv-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.196989 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197005 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197020 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk887\" (UniqueName: \"kubernetes.io/projected/53b84a0d-9a65-471b-af63-19eec862c572-kube-api-access-rk887\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197034 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197052 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qtkn\" (UniqueName: \"kubernetes.io/projected/ddc512fd-9f37-4257-b51b-0d2debe5df0b-kube-api-access-7qtkn\") pod \"migrator-59844c95c7-qrzws\" (UID: \"ddc512fd-9f37-4257-b51b-0d2debe5df0b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197067 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197090 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d79nl\" (UniqueName: \"kubernetes.io/projected/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-kube-api-access-d79nl\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: 
\"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197106 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-trusted-ca\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197119 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-metrics-tls\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197133 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-profile-collector-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197160 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/823b075c-2bee-4823-b938-afabe4af612a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197175 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-metrics-certs\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197192 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197209 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc4gt\" (UniqueName: \"kubernetes.io/projected/6553ea68-1d18-4953-8e43-09d47f2745fc-kube-api-access-tc4gt\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197224 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 
14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197239 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6553ea68-1d18-4953-8e43-09d47f2745fc-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197326 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn2d6\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-kube-api-access-cn2d6\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197346 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whzjf\" (UniqueName: \"kubernetes.io/projected/7b18a264-5694-4065-92fb-a41b3f231a79-kube-api-access-whzjf\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197365 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnfmz\" (UniqueName: \"kubernetes.io/projected/adfbdf33-01b1-454d-bb33-b32818244bc2-kube-api-access-gnfmz\") pod \"downloads-7954f5f757-4fxpg\" (UID: \"adfbdf33-01b1-454d-bb33-b32818244bc2\") " pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197380 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b18a264-5694-4065-92fb-a41b3f231a79-serving-cert\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197394 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-default-certificate\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197408 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43fb274b-4188-4db5-a4f0-f7c18bca433e-config\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197426 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdcb9400-6fea-46b5-ac30-975a690d67c5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 
14:06:04.197442 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197457 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197472 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197491 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197528 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-metrics-tls\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197542 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg9qg\" (UniqueName: \"kubernetes.io/projected/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-kube-api-access-dg9qg\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197579 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197605 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197618 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9x56\" (UniqueName: \"kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56\") pod \"console-f9d7485db-f4vfm\" 
(UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197632 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/427e7a1a-b751-4226-9c17-7a8b9226ccc9-service-ca-bundle\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197652 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x2cq\" (UniqueName: \"kubernetes.io/projected/f4ac5e59-3c55-428e-bb44-d96db06fdc78-kube-api-access-6x2cq\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197665 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197677 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197693 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbh55\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-kube-api-access-jbh55\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197708 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7stk6\" (UniqueName: \"kubernetes.io/projected/fdcb9400-6fea-46b5-ac30-975a690d67c5-kube-api-access-7stk6\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197722 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-stats-auth\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197739 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-config\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc 
kubenswrapper[4663]: I1212 14:06:04.197770 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/823b075c-2bee-4823-b938-afabe4af612a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197785 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-trusted-ca\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197799 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43fb274b-4188-4db5-a4f0-f7c18bca433e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197819 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197834 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197851 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197865 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rvmv\" (UniqueName: \"kubernetes.io/projected/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-kube-api-access-9rvmv\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197891 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.197905 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rwhzm\" (UniqueName: \"kubernetes.io/projected/427e7a1a-b751-4226-9c17-7a8b9226ccc9-kube-api-access-rwhzm\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.199826 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.200640 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-config\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.200998 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.201031 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.201136 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.201778 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.201849 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.201852 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: 
I1212 14:06:04.202451 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.202930 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/823b075c-2bee-4823-b938-afabe4af612a-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.203068 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b18a264-5694-4065-92fb-a41b3f231a79-trusted-ca\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.203463 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/823b075c-2bee-4823-b938-afabe4af612a-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.207896 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b18a264-5694-4065-92fb-a41b3f231a79-serving-cert\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.209198 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.213313 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.214545 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.235728 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.255922 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 12 
14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.275390 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.285383 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-metrics-tls\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.294936 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.315439 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.339315 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.343143 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-trusted-ca\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.355375 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.374782 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.381253 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-metrics-certs\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.394647 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.401652 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-default-certificate\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.415102 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.435372 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.442300 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/427e7a1a-b751-4226-9c17-7a8b9226ccc9-stats-auth\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " 
pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.454686 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.460632 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/427e7a1a-b751-4226-9c17-7a8b9226ccc9-service-ca-bundle\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.474756 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.495064 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.515098 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.534419 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.538883 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.554357 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.564668 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.575094 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.595389 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.615254 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.634479 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.654472 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.674604 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.700688 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.715930 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.734599 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.740971 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43fb274b-4188-4db5-a4f0-f7c18bca433e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.754663 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.763524 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43fb274b-4188-4db5-a4f0-f7c18bca433e-config\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.775712 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.794833 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.814725 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.835016 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.854497 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.874507 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.894820 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.901346 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-srv-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.914933 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.935193 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.943620 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f4ac5e59-3c55-428e-bb44-d96db06fdc78-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.945575 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-profile-collector-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.954648 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.974960 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 12 14:06:04 crc kubenswrapper[4663]: I1212 14:06:04.994738 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.014597 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.033700 4663 request.go:700] Waited for 1.016448852s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns-operator/secrets?fieldSelector=metadata.name%3Ddns-operator-dockercfg-9mqw5&limit=500&resourceVersion=0 Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.034837 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.055059 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.064327 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-metrics-tls\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.074398 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.095634 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.101397 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6553ea68-1d18-4953-8e43-09d47f2745fc-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.114704 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.134953 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.144835 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdcb9400-6fea-46b5-ac30-975a690d67c5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.154924 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.174998 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.194602 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.198574 4663 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.198623 4663 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.198644 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert podName:2102ea25-a87a-4deb-b2f6-bf54ae31f08f nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.698628633 +0000 UTC m=+145.124046686 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert") pod "service-ca-operator-777779d784-gxgm9" (UID: "2102ea25-a87a-4deb-b2f6-bf54ae31f08f") : failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.198669 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config podName:2102ea25-a87a-4deb-b2f6-bf54ae31f08f nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.698654892 +0000 UTC m=+145.124072946 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config") pod "service-ca-operator-777779d784-gxgm9" (UID: "2102ea25-a87a-4deb-b2f6-bf54ae31f08f") : failed to sync configmap cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.202164 4663 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.202192 4663 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.202207 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs podName:55f03b2c-5afc-42ea-8e36-f68b358e3f4f nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.702197743 +0000 UTC m=+145.127615797 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs") pod "multus-admission-controller-857f4d67dd-zzw52" (UID: "55f03b2c-5afc-42ea-8e36-f68b358e3f4f") : failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.202236 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert podName:53b84a0d-9a65-471b-af63-19eec862c572 nodeName:}" failed. No retries permitted until 2025-12-12 14:06:05.702223802 +0000 UTC m=+145.127641856 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert") pod "catalog-operator-68c6474976-bc9bd" (UID: "53b84a0d-9a65-471b-af63-19eec862c572") : failed to sync secret cache: timed out waiting for the condition Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.214210 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.241452 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.254264 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.274716 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.295292 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.314283 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.334738 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.355162 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.374488 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.394838 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.435273 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.454505 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.475967 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.494911 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.514965 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.539772 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.555125 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.575125 4663 reflector.go:368] Caches populated for *v1.ConfigMap 
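[annotation] The E1212 14:06:05.198*-.202* cluster above is startup ordering noise: volume mounts for service-ca-operator, multus, and catalog-operator were attempted before the kubelet's secret/configmap reflector caches had synced, so each mount fails with "failed to sync secret cache: timed out waiting for the condition" and is requeued by nestedpendingoperations with a 500ms backoff; the surrounding "Caches populated" lines show the caches catching up. A minimal sketch of the underlying wait-for-cache-sync pattern with client-go informers, assuming standard packages; the kubeconfig path and the 5-second window are illustrative, not the kubelet's actual values:

    package main

    import (
    	"fmt"
    	"time"

    	"k8s.io/client-go/informers"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/cache"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	client := kubernetes.NewForConfigOrDie(cfg)

    	factory := informers.NewSharedInformerFactory(client, 0)
    	secrets := factory.Core().V1().Secrets().Informer()

    	stop := make(chan struct{})
    	defer close(stop)
    	factory.Start(stop)

    	// Give the reflector a bounded window to populate its cache; the
    	// kubelet's mount path performs the same kind of bounded wait and
    	// surfaces "timed out waiting for the condition" when it expires.
    	timeout := make(chan struct{})
    	go func() { time.Sleep(5 * time.Second); close(timeout) }()
    	if !cache.WaitForCacheSync(timeout, secrets.HasSynced) {
    		fmt.Println("failed to sync secret cache: timed out waiting for the condition")
    		return
    	}
    	fmt.Println("secret cache synced")
    }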
from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.594405 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.615267 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.634351 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.655137 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.675207 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.694652 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.714597 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.715608 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.715767 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.715814 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.715841 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.716379 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-config\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.718539 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-serving-cert\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.718731 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/53b84a0d-9a65-471b-af63-19eec862c572-srv-cert\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.719315 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.734544 4663 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.754321 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.774726 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.795287 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.814987 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.817009 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:05 crc kubenswrapper[4663]: E1212 14:06:05.817184 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:08:07.817168329 +0000 UTC m=+267.242586383 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.834635 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.855439 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.874596 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.894834 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.918034 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.918110 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.918206 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.918315 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.918984 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.921283 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: 
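[annotation] Unlike the cache-sync noise, the UnmountVolume.TearDown failure above is longer-lived: the CSI plugin kubevirt.io.hostpath-provisioner has not (or not yet) registered with this kubelet, so teardown of pvc-657094db-... is parked for a full 2m2s backoff interval. A sketch of one way to inspect registration state from outside, assuming cluster-admin credentials and standard client-go/storage APIs; the kubeconfig path is illustrative, and "crc" is the node name taken from this log:

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	client := kubernetes.NewForConfigOrDie(cfg)
    	ctx := context.Background()

    	// CSIDriver objects advertise drivers installed cluster-wide...
    	drivers, err := client.StorageV1().CSIDrivers().List(ctx, metav1.ListOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, d := range drivers.Items {
    		fmt.Println("CSIDriver:", d.Name)
    	}

    	// ...while the CSINode object for this node lists the drivers that
    	// have actually registered with its kubelet - the list the error
    	// above says kubevirt.io.hostpath-provisioner is missing from.
    	node, err := client.StorageV1().CSINodes().Get(ctx, "crc", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, d := range node.Spec.Drivers {
    		fmt.Println("registered on node:", d.Name)
    	}
    }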
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.921847 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.921908 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.927726 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsbp6\" (UniqueName: \"kubernetes.io/projected/fcf24979-86e7-4a32-af0b-458a7d4df2c2-kube-api-access-zsbp6\") pod \"apiserver-7bbb656c7d-vhb2f\" (UID: \"fcf24979-86e7-4a32-af0b-458a7d4df2c2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.945703 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxgm4\" (UniqueName: \"kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4\") pod \"oauth-openshift-558db77b4-5xjs7\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.967655 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfzmv\" (UniqueName: \"kubernetes.io/projected/a39261d0-7d6f-4f3a-b741-e476ea306d9a-kube-api-access-cfzmv\") pod \"openshift-apiserver-operator-796bbdcf4f-whkx5\" (UID: \"a39261d0-7d6f-4f3a-b741-e476ea306d9a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:05 crc kubenswrapper[4663]: I1212 14:06:05.986157 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9dbb\" (UniqueName: \"kubernetes.io/projected/317dd582-032f-45be-ab3d-30f199ac2261-kube-api-access-r9dbb\") pod \"machine-api-operator-5694c8668f-lq2p4\" (UID: \"317dd582-032f-45be-ab3d-30f199ac2261\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.005998 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmwt4\" (UniqueName: \"kubernetes.io/projected/27da8075-7ceb-4be8-aac2-db2a24030a24-kube-api-access-tmwt4\") pod \"authentication-operator-69f744f599-qmlvn\" (UID: \"27da8075-7ceb-4be8-aac2-db2a24030a24\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.026360 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qglbb\" (UniqueName: \"kubernetes.io/projected/990d6886-87e6-4d23-9d63-95c2e2db5525-kube-api-access-qglbb\") pod 
\"machine-approver-56656f9798-qqc4q\" (UID: \"990d6886-87e6-4d23-9d63-95c2e2db5525\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.029250 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.034033 4663 request.go:700] Waited for 1.93341805s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-config-operator/serviceaccounts/openshift-config-operator/token Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.034194 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.040152 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.047821 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g9kd\" (UniqueName: \"kubernetes.io/projected/d9b79bab-ef9e-4304-a599-90d56a93b228-kube-api-access-9g9kd\") pod \"openshift-config-operator-7777fb866f-7wb5x\" (UID: \"d9b79bab-ef9e-4304-a599-90d56a93b228\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.054977 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.070822 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod990d6886_87e6_4d23_9d63_95c2e2db5525.slice/crio-4df78e554ec6ce6a40b010fedffe4857b08a430fd243bcd1d1145aa960a85593 WatchSource:0}: Error finding container 4df78e554ec6ce6a40b010fedffe4857b08a430fd243bcd1d1145aa960a85593: Status 404 returned error can't find the container with id 4df78e554ec6ce6a40b010fedffe4857b08a430fd243bcd1d1145aa960a85593 Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.073758 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.075177 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.080065 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.082878 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.090971 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.095207 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.097120 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.116931 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.129335 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.129387 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.148847 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac4719c1-f4b6-4bd1-bf13-b2fbfecad103-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-gw8zt\" (UID: \"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.177280 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x47zj\" (UniqueName: \"kubernetes.io/projected/823b075c-2bee-4823-b938-afabe4af612a-kube-api-access-x47zj\") pod \"openshift-controller-manager-operator-756b6f6bc6-4c6jc\" (UID: \"823b075c-2bee-4823-b938-afabe4af612a\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.189934 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.190739 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qtkn\" (UniqueName: \"kubernetes.io/projected/ddc512fd-9f37-4257-b51b-0d2debe5df0b-kube-api-access-7qtkn\") pod \"migrator-59844c95c7-qrzws\" (UID: \"ddc512fd-9f37-4257-b51b-0d2debe5df0b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.206020 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lq2p4"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.207779 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg55n\" (UniqueName: \"kubernetes.io/projected/2102ea25-a87a-4deb-b2f6-bf54ae31f08f-kube-api-access-tg55n\") pod \"service-ca-operator-777779d784-gxgm9\" (UID: \"2102ea25-a87a-4deb-b2f6-bf54ae31f08f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.225251 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod317dd582_032f_45be_ab3d_30f199ac2261.slice/crio-960c7710fcd5ccb74e9dd4ba59a9b1ef5b4d17cfed7f222139bacebe0ecedf5e WatchSource:0}: Error finding container 960c7710fcd5ccb74e9dd4ba59a9b1ef5b4d17cfed7f222139bacebe0ecedf5e: Status 404 returned error can't find the container with id 960c7710fcd5ccb74e9dd4ba59a9b1ef5b4d17cfed7f222139bacebe0ecedf5e Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.232516 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwhzm\" (UniqueName: \"kubernetes.io/projected/427e7a1a-b751-4226-9c17-7a8b9226ccc9-kube-api-access-rwhzm\") pod \"router-default-5444994796-ddcb6\" (UID: \"427e7a1a-b751-4226-9c17-7a8b9226ccc9\") " pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.252521 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc4gt\" (UniqueName: \"kubernetes.io/projected/6553ea68-1d18-4953-8e43-09d47f2745fc-kube-api-access-tc4gt\") pod \"package-server-manager-789f6589d5-2rskd\" (UID: \"6553ea68-1d18-4953-8e43-09d47f2745fc\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.262964 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.263136 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.269497 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.288703 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn2d6\" (UniqueName: \"kubernetes.io/projected/dab80307-f3f3-4fda-b4a7-eb0da7c60a9c-kube-api-access-cn2d6\") pod \"ingress-operator-5b745b69d9-z9g5q\" (UID: \"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.309455 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whzjf\" (UniqueName: \"kubernetes.io/projected/7b18a264-5694-4065-92fb-a41b3f231a79-kube-api-access-whzjf\") pod \"console-operator-58897d9998-9hr59\" (UID: \"7b18a264-5694-4065-92fb-a41b3f231a79\") " pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.323637 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.324163 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.328172 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x2cq\" (UniqueName: \"kubernetes.io/projected/f4ac5e59-3c55-428e-bb44-d96db06fdc78-kube-api-access-6x2cq\") pod \"olm-operator-6b444d44fb-wpq62\" (UID: \"f4ac5e59-3c55-428e-bb44-d96db06fdc78\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.341660 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" event={"ID":"317dd582-032f-45be-ab3d-30f199ac2261","Type":"ContainerStarted","Data":"d0f73d5a767c21ef68f4d6e001c0ef153d50f8a539bedd2dc3e5b012b6bb8123"} Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.341705 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" event={"ID":"317dd582-032f-45be-ab3d-30f199ac2261","Type":"ContainerStarted","Data":"960c7710fcd5ccb74e9dd4ba59a9b1ef5b4d17cfed7f222139bacebe0ecedf5e"} Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.348664 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg9qg\" (UniqueName: \"kubernetes.io/projected/5c7f3370-d558-4607-a4a3-9fc23d33b8a7-kube-api-access-dg9qg\") pod \"dns-operator-744455d44c-j724k\" (UID: \"5c7f3370-d558-4607-a4a3-9fc23d33b8a7\") " pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.348951 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" event={"ID":"a39261d0-7d6f-4f3a-b741-e476ea306d9a","Type":"ContainerStarted","Data":"42641de0542d47b6348d19a8fee53087df47585472ec4aa5ea57c91919e2d35d"} Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.355526 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" event={"ID":"990d6886-87e6-4d23-9d63-95c2e2db5525","Type":"ContainerStarted","Data":"e9d2de4c4123264b8dafccd4a045071ddaedf64b7ffa75fac93eae1575002acf"} Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.355559 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" event={"ID":"990d6886-87e6-4d23-9d63-95c2e2db5525","Type":"ContainerStarted","Data":"4df78e554ec6ce6a40b010fedffe4857b08a430fd243bcd1d1145aa960a85593"} Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.370082 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-qmlvn"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.370702 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.372295 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbh55\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-kube-api-access-jbh55\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 
14:06:06.386615 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7stk6\" (UniqueName: \"kubernetes.io/projected/fdcb9400-6fea-46b5-ac30-975a690d67c5-kube-api-access-7stk6\") pod \"control-plane-machine-set-operator-78cbb6b69f-zcz24\" (UID: \"fdcb9400-6fea-46b5-ac30-975a690d67c5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.403674 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.411300 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk887\" (UniqueName: \"kubernetes.io/projected/53b84a0d-9a65-471b-af63-19eec862c572-kube-api-access-rk887\") pod \"catalog-operator-68c6474976-bc9bd\" (UID: \"53b84a0d-9a65-471b-af63-19eec862c572\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.421443 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.425771 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.428521 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.431050 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43fb274b-4188-4db5-a4f0-f7c18bca433e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v2jqd\" (UID: \"43fb274b-4188-4db5-a4f0-f7c18bca433e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.433458 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.448046 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rvmv\" (UniqueName: \"kubernetes.io/projected/55f03b2c-5afc-42ea-8e36-f68b358e3f4f-kube-api-access-9rvmv\") pod \"multus-admission-controller-857f4d67dd-zzw52\" (UID: \"55f03b2c-5afc-42ea-8e36-f68b358e3f4f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.456825 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.460931 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3f05375_5727_4536_8d9e_541cdb88127a.slice/crio-7b1ae78f142a657654543479a7b3fa7d6b7cdfce49c99a160951e2bac2cff2ad WatchSource:0}: Error finding container 7b1ae78f142a657654543479a7b3fa7d6b7cdfce49c99a160951e2bac2cff2ad: Status 404 returned error can't find the container with id 7b1ae78f142a657654543479a7b3fa7d6b7cdfce49c99a160951e2bac2cff2ad Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.461460 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcf24979_86e7_4a32_af0b_458a7d4df2c2.slice/crio-60a74b1128789529bc1391a8e2bb62c5bd05f896414f24e396cf05df297c63eb WatchSource:0}: Error finding container 60a74b1128789529bc1391a8e2bb62c5bd05f896414f24e396cf05df297c63eb: Status 404 returned error can't find the container with id 60a74b1128789529bc1391a8e2bb62c5bd05f896414f24e396cf05df297c63eb Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.467347 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnfmz\" (UniqueName: \"kubernetes.io/projected/adfbdf33-01b1-454d-bb33-b32818244bc2-kube-api-access-gnfmz\") pod \"downloads-7954f5f757-4fxpg\" (UID: \"adfbdf33-01b1-454d-bb33-b32818244bc2\") " pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.478877 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.485693 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.490084 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-67kxr\" (UID: \"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.517682 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9x56\" (UniqueName: \"kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56\") pod \"console-f9d7485db-f4vfm\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.518109 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.531425 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d79nl\" (UniqueName: \"kubernetes.io/projected/7498af08-fd9e-4439-a0d7-5e5a8d5cc92d-kube-api-access-d79nl\") pod \"kube-storage-version-migrator-operator-b67b599dd-49pbd\" (UID: \"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.532926 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.542972 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.544497 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.555495 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2102ea25_a87a_4deb_b2f6_bf54ae31f08f.slice/crio-2a53e2012677b1f02b5773c88a16e0f654855e3a89103278ca885e04a1a2ec40 WatchSource:0}: Error finding container 2a53e2012677b1f02b5773c88a16e0f654855e3a89103278ca885e04a1a2ec40: Status 404 returned error can't find the container with id 2a53e2012677b1f02b5773c88a16e0f654855e3a89103278ca885e04a1a2ec40 Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.556286 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.573374 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.583965 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.590941 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.618050 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626075 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8t8q\" (UniqueName: \"kubernetes.io/projected/22b1ed4b-7956-4b80-8054-51a85f130dde-kube-api-access-q8t8q\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626100 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-webhook-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626117 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626131 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-client\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626147 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-service-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626173 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626185 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc90392c-96be-4eda-b4ee-ef73500467e5-proxy-tls\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626200 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-encryption-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626216 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wksg\" (UniqueName: \"kubernetes.io/projected/6494cb84-311b-407d-b709-495ff1a2aaba-kube-api-access-7wksg\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626230 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626244 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626260 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626273 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-images\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626287 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5e2e4d7d-7756-4f77-9151-fe775e3beb42-proxy-tls\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626300 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626313 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrmw6\" (UniqueName: \"kubernetes.io/projected/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-kube-api-access-qrmw6\") pod \"etcd-operator-b45778765-gtwt4\" 
(UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626326 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-client\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626340 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626356 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fcc5eb52-de54-4535-9c8a-5158b76d54a5-tmpfs\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626372 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e600b0d3-bfe3-4a65-8331-ae9218a5245f-config\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626385 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/22b1ed4b-7956-4b80-8054-51a85f130dde-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626400 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e600b0d3-bfe3-4a65-8331-ae9218a5245f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626419 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626436 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: 
\"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626457 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626471 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc90392c-96be-4eda-b4ee-ef73500467e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626486 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-serving-cert\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626503 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jw4m\" (UniqueName: \"kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626516 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-serving-cert\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626543 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626557 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626587 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cvblj\" (UID: 
\"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626609 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w8zb\" (UniqueName: \"kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626623 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5kqc\" (UniqueName: \"kubernetes.io/projected/890f4608-3a85-4bae-be47-f35c7f268a29-kube-api-access-s5kqc\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626636 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-config\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626649 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6494cb84-311b-407d-b709-495ff1a2aaba-signing-cabundle\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626661 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-node-pullsecrets\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626688 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-apiservice-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626700 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.626716 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627083 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn9mc\" (UniqueName: \"kubernetes.io/projected/cc90392c-96be-4eda-b4ee-ef73500467e5-kube-api-access-fn9mc\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627103 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fjq8\" (UniqueName: \"kubernetes.io/projected/fcc5eb52-de54-4535-9c8a-5158b76d54a5-kube-api-access-5fjq8\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627117 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-audit\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627158 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-image-import-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627509 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:06 crc kubenswrapper[4663]: E1212 14:06:06.627708 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.127698928 +0000 UTC m=+146.553116983 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627852 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcjlf\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627964 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e600b0d3-bfe3-4a65-8331-ae9218a5245f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.627997 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-audit-dir\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.628051 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6494cb84-311b-407d-b709-495ff1a2aaba-signing-key\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.628071 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-serving-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.628119 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22nfb\" (UniqueName: \"kubernetes.io/projected/5e2e4d7d-7756-4f77-9151-fe775e3beb42-kube-api-access-22nfb\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.650136 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9hr59"]
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.697200 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc"]
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.700906 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr"
Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.702677 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b18a264_5694_4065_92fb_a41b3f231a79.slice/crio-6a7f5a2f3662b24f63f69ac2cb7958c7cf8a11c9045e031442eab789db193a63 WatchSource:0}: Error finding container 6a7f5a2f3662b24f63f69ac2cb7958c7cf8a11c9045e031442eab789db193a63: Status 404 returned error can't find the container with id 6a7f5a2f3662b24f63f69ac2cb7958c7cf8a11c9045e031442eab789db193a63
Dec 12 14:06:06 crc kubenswrapper[4663]: W1212 14:06:06.707548 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod823b075c_2bee_4823_b938_afabe4af612a.slice/crio-4876f6f4049840767d2d4dd53dddd156b9cb562b0377ea99e9a1e2b5add3f12f WatchSource:0}: Error finding container 4876f6f4049840767d2d4dd53dddd156b9cb562b0377ea99e9a1e2b5add3f12f: Status 404 returned error can't find the container with id 4876f6f4049840767d2d4dd53dddd156b9cb562b0377ea99e9a1e2b5add3f12f
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735012 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735272 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22nfb\" (UniqueName: \"kubernetes.io/projected/5e2e4d7d-7756-4f77-9151-fe775e3beb42-kube-api-access-22nfb\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735310 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8t8q\" (UniqueName: \"kubernetes.io/projected/22b1ed4b-7956-4b80-8054-51a85f130dde-kube-api-access-q8t8q\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735349 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-webhook-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735390 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735407 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-client\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735453 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-service-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735488 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735541 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735578 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc90392c-96be-4eda-b4ee-ef73500467e5-proxy-tls\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735597 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-encryption-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735625 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-registration-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735659 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wksg\" (UniqueName: \"kubernetes.io/projected/6494cb84-311b-407d-b709-495ff1a2aaba-kube-api-access-7wksg\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735677 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735695 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735711 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-csi-data-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735729 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-cert\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735769 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735794 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f64627e-8a01-439b-9401-be61f39a76cc-metrics-tls\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735811 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-images\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: E1212 14:06:06.735826 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.235808877 +0000 UTC m=+146.661226930 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.735975 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wx9p\" (UniqueName: \"kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.736025 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5e2e4d7d-7756-4f77-9151-fe775e3beb42-proxy-tls\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737237 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-images\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737620 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737699 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrmw6\" (UniqueName: \"kubernetes.io/projected/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-kube-api-access-qrmw6\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737720 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-client\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737773 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737807 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-982qs\" (UniqueName: \"kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737854 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fcc5eb52-de54-4535-9c8a-5158b76d54a5-tmpfs\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.737873 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xfqq\" (UniqueName: \"kubernetes.io/projected/33e2041b-8ae7-4c04-bf4f-3abce29de30d-kube-api-access-9xfqq\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738115 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738245 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e600b0d3-bfe3-4a65-8331-ae9218a5245f-config\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738268 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/22b1ed4b-7956-4b80-8054-51a85f130dde-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738328 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e600b0d3-bfe3-4a65-8331-ae9218a5245f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738349 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738378 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738412 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738432 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxz6q\" (UniqueName: \"kubernetes.io/projected/ac045012-203c-455a-be9d-626711804b7c-kube-api-access-wxz6q\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738450 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738467 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738486 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc90392c-96be-4eda-b4ee-ef73500467e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738505 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-serving-cert\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738527 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jw4m\" (UniqueName: \"kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738580 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738688 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-serving-cert\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738718 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738766 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738785 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738808 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-socket-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738826 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-node-bootstrap-token\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4"
Dec 12 14:06:06 crc kubenswrapper[4663]: E1212 14:06:06.738870 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.238855208 +0000 UTC m=+146.664273262 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738892 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f64627e-8a01-439b-9401-be61f39a76cc-config-volume\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738916 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-mountpoint-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738937 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-certs\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738955 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.738978 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w8zb\" (UniqueName: \"kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739005 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9zn9\" (UniqueName: \"kubernetes.io/projected/7f64627e-8a01-439b-9401-be61f39a76cc-kube-api-access-f9zn9\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739052 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5kqc\" (UniqueName: \"kubernetes.io/projected/890f4608-3a85-4bae-be47-f35c7f268a29-kube-api-access-s5kqc\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739073 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-config\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739117 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6494cb84-311b-407d-b709-495ff1a2aaba-signing-cabundle\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739145 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-node-pullsecrets\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739171 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-apiservice-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739189 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.739554 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q"]
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.740018 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-node-pullsecrets\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.740214 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-config\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.741152 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742300 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742421 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn9mc\" (UniqueName: \"kubernetes.io/projected/cc90392c-96be-4eda-b4ee-ef73500467e5-kube-api-access-fn9mc\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742436 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc90392c-96be-4eda-b4ee-ef73500467e5-proxy-tls\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742576 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742605 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742631 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg2sf\" (UniqueName: \"kubernetes.io/projected/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-kube-api-access-gg2sf\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742616 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-webhook-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742660 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fjq8\" (UniqueName: \"kubernetes.io/projected/fcc5eb52-de54-4535-9c8a-5158b76d54a5-kube-api-access-5fjq8\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742735 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-audit\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742773 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-plugins-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742839 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-image-import-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742861 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.742879 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743267 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcjlf\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743296 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e600b0d3-bfe3-4a65-8331-ae9218a5245f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743338 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-audit-dir\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743365 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6494cb84-311b-407d-b709-495ff1a2aaba-signing-key\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743382 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-serving-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.743649 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fcc5eb52-de54-4535-9c8a-5158b76d54a5-apiservice-cert\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.744552 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.745420 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6494cb84-311b-407d-b709-495ff1a2aaba-signing-cabundle\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.746228 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fcc5eb52-de54-4535-9c8a-5158b76d54a5-tmpfs\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.746239 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-audit\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.746431 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-image-import-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.748578 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-serving-cert\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.749114 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.749476 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.749492 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-f4vfm"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.750206 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.750216 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e600b0d3-bfe3-4a65-8331-ae9218a5245f-config\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.751537 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.752734 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-service-ca\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.753592 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5e2e4d7d-7756-4f77-9151-fe775e3beb42-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.754009 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.753542 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e600b0d3-bfe3-4a65-8331-ae9218a5245f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.754136 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.754263 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/890f4608-3a85-4bae-be47-f35c7f268a29-audit-dir\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.754453 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-encryption-config\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.754941 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc90392c-96be-4eda-b4ee-ef73500467e5-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"
Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.756076 4663 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.757027 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.757089 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.758886 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6494cb84-311b-407d-b709-495ff1a2aaba-signing-key\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.761131 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-client\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.761233 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5e2e4d7d-7756-4f77-9151-fe775e3beb42-proxy-tls\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.761826 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.768040 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-etcd-client\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.768209 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8t8q\" (UniqueName: \"kubernetes.io/projected/22b1ed4b-7956-4b80-8054-51a85f130dde-kube-api-access-q8t8q\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.768196 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/22b1ed4b-7956-4b80-8054-51a85f130dde-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kqvdh\" (UID: \"22b1ed4b-7956-4b80-8054-51a85f130dde\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.768526 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/890f4608-3a85-4bae-be47-f35c7f268a29-etcd-serving-ca\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.801861 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wksg\" (UniqueName: \"kubernetes.io/projected/6494cb84-311b-407d-b709-495ff1a2aaba-kube-api-access-7wksg\") pod \"service-ca-9c57cc56f-t5mzq\" (UID: \"6494cb84-311b-407d-b709-495ff1a2aaba\") " pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.803057 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/890f4608-3a85-4bae-be47-f35c7f268a29-serving-cert\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.809327 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jw4m\" (UniqueName: \"kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m\") pod \"route-controller-manager-6576b87f9c-zsn76\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.815030 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-j724k"] Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.836771 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5kqc\" (UniqueName: \"kubernetes.io/projected/890f4608-3a85-4bae-be47-f35c7f268a29-kube-api-access-s5kqc\") pod \"apiserver-76f77b778f-cvblj\" (UID: \"890f4608-3a85-4bae-be47-f35c7f268a29\") " pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.843941 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844070 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844090 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles\") pod 
\"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844113 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxz6q\" (UniqueName: \"kubernetes.io/projected/ac045012-203c-455a-be9d-626711804b7c-kube-api-access-wxz6q\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844130 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844151 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-socket-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844165 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-node-bootstrap-token\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844181 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f64627e-8a01-439b-9401-be61f39a76cc-config-volume\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844195 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-mountpoint-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844210 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-certs\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844239 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9zn9\" (UniqueName: \"kubernetes.io/projected/7f64627e-8a01-439b-9401-be61f39a76cc-kube-api-access-f9zn9\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844271 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844286 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844300 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg2sf\" (UniqueName: \"kubernetes.io/projected/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-kube-api-access-gg2sf\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844319 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-plugins-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844367 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844388 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-registration-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844420 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-csi-data-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-cert\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844454 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f64627e-8a01-439b-9401-be61f39a76cc-metrics-tls\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844468 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wx9p\" (UniqueName: 
\"kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844494 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-982qs\" (UniqueName: \"kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.844512 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xfqq\" (UniqueName: \"kubernetes.io/projected/33e2041b-8ae7-4c04-bf4f-3abce29de30d-kube-api-access-9xfqq\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.845134 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7f64627e-8a01-439b-9401-be61f39a76cc-config-volume\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846112 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-plugins-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846152 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-registration-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846314 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-csi-data-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846710 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-mountpoint-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846785 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.846878 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: E1212 14:06:06.847074 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.34705269 +0000 UTC m=+146.772470745 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.847520 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.847656 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.847697 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/33e2041b-8ae7-4c04-bf4f-3abce29de30d-socket-dir\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.849973 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-node-bootstrap-token\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.850439 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7f64627e-8a01-439b-9401-be61f39a76cc-metrics-tls\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.851387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.851625 4663 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9w8zb\" (UniqueName: \"kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb\") pod \"marketplace-operator-79b997595-xsrh2\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.852178 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.865119 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-cert\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.865245 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ac045012-203c-455a-be9d-626711804b7c-certs\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.867693 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrmw6\" (UniqueName: \"kubernetes.io/projected/d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e-kube-api-access-qrmw6\") pod \"etcd-operator-b45778765-gtwt4\" (UID: \"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.889013 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fjq8\" (UniqueName: \"kubernetes.io/projected/fcc5eb52-de54-4535-9c8a-5158b76d54a5-kube-api-access-5fjq8\") pod \"packageserver-d55dfcdfc-67sg6\" (UID: \"fcc5eb52-de54-4535-9c8a-5158b76d54a5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.901391 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.909355 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn9mc\" (UniqueName: \"kubernetes.io/projected/cc90392c-96be-4eda-b4ee-ef73500467e5-kube-api-access-fn9mc\") pod \"machine-config-controller-84d6567774-bk68t\" (UID: \"cc90392c-96be-4eda-b4ee-ef73500467e5\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.929290 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22nfb\" (UniqueName: \"kubernetes.io/projected/5e2e4d7d-7756-4f77-9151-fe775e3beb42-kube-api-access-22nfb\") pod \"machine-config-operator-74547568cd-k85tq\" (UID: \"5e2e4d7d-7756-4f77-9151-fe775e3beb42\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.945428 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:06 crc kubenswrapper[4663]: E1212 14:06:06.946269 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.446254864 +0000 UTC m=+146.871672918 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:06 crc kubenswrapper[4663]: I1212 14:06:06.959913 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcjlf\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.003229 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.004638 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.010937 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e600b0d3-bfe3-4a65-8331-ae9218a5245f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lqvwz\" (UID: \"e600b0d3-bfe3-4a65-8331-ae9218a5245f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.016517 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.024866 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.042868 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xfqq\" (UniqueName: \"kubernetes.io/projected/33e2041b-8ae7-4c04-bf4f-3abce29de30d-kube-api-access-9xfqq\") pod \"csi-hostpathplugin-qttz2\" (UID: \"33e2041b-8ae7-4c04-bf4f-3abce29de30d\") " pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.048156 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.048583 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.548565907 +0000 UTC m=+146.973983961 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: W1212 14:06:07.053909 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4ac5e59_3c55_428e_bb44_d96db06fdc78.slice/crio-cf081e19fb7fed20e1550ceb3f10f74df8034110bce1f50026e198ea8d5bc9f9 WatchSource:0}: Error finding container cf081e19fb7fed20e1550ceb3f10f74df8034110bce1f50026e198ea8d5bc9f9: Status 404 returned error can't find the container with id cf081e19fb7fed20e1550ceb3f10f74df8034110bce1f50026e198ea8d5bc9f9 Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.062791 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.063225 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wx9p\" (UniqueName: \"kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p\") pod \"controller-manager-879f6c89f-8g7gt\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.067208 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.073223 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.092175 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-982qs\" (UniqueName: \"kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs\") pod \"collect-profiles-29425800-bgfn9\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.093282 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9zn9\" (UniqueName: \"kubernetes.io/projected/7f64627e-8a01-439b-9401-be61f39a76cc-kube-api-access-f9zn9\") pod \"dns-default-t4pt8\" (UID: \"7f64627e-8a01-439b-9401-be61f39a76cc\") " pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.120469 4663 util.go:30] "No sandbox for pod can be found. 
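Every CSI failure in this log ends the same way: "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers". The kubelet can only build a CSI client for drivers that have announced themselves over the plugin-registration socket, and the pod that provides this driver (csi-hostpathplugin-qttz2) is itself still waiting for a sandbox, so the lookup keeps failing. A minimal Go sketch of that registry lookup, assuming an illustrative map-based registry rather than the real csi_plugin.go types:

    package main

    import "fmt"

    type registry map[string]string // driver name -> plugin socket endpoint

    // newCsiDriverClient fails, as in the log, when a driver has not yet
    // registered itself with the kubelet.
    func (r registry) newCsiDriverClient(driver string) (string, error) {
        ep, ok := r[driver]
        if !ok {
            return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driver)
        }
        return ep, nil
    }

    func main() {
        r := registry{}
        // Before the hostpath plugin pod starts, every lookup fails:
        if _, err := r.newCsiDriverClient("kubevirt.io.hostpath-provisioner"); err != nil {
            fmt.Println("MountDevice failed:", err)
        }
        // Once the plugin registers via its registration-dir socket,
        // the same retried operation would succeed (hypothetical path):
        r["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock"
        ep, _ := r.newCsiDriverClient("kubevirt.io.hostpath-provisioner")
        fmt.Println("client endpoint:", ep)
    }

Note the chicken-and-egg shape of the situation: the registration-dir and socket-dir mounts for csi-hostpathplugin-qttz2 succeed above, but until that pod's containers run and register the driver, both the image-registry MountDevice and the orphaned-pod TearDown retries are doomed to repeat.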
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.128739 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxz6q\" (UniqueName: \"kubernetes.io/projected/ac045012-203c-455a-be9d-626711804b7c-kube-api-access-wxz6q\") pod \"machine-config-server-zxqd4\" (UID: \"ac045012-203c-455a-be9d-626711804b7c\") " pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.129953 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zzw52"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.135841 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg2sf\" (UniqueName: \"kubernetes.io/projected/bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0-kube-api-access-gg2sf\") pod \"ingress-canary-7krfn\" (UID: \"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0\") " pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.139310 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.140591 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.150806 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.151096 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.651084599 +0000 UTC m=+147.076502653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.151146 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.180064 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.195588 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.194711 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.237901 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.237939 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.251156 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.251431 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.751417409 +0000 UTC m=+147.176835463 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.252729 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.267582 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.269944 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7krfn" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.270359 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zxqd4" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.277404 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.331717 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4fxpg"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.331994 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.352997 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.353302 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.853291484 +0000 UTC m=+147.278709538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.392831 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-t5mzq"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.408583 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" event={"ID":"823b075c-2bee-4823-b938-afabe4af612a","Type":"ContainerStarted","Data":"7fb0c065b42e78e9e9010a542d62e129e6d73c06b64f590fe2a24fe8788ac63b"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.408622 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" event={"ID":"823b075c-2bee-4823-b938-afabe4af612a","Type":"ContainerStarted","Data":"4876f6f4049840767d2d4dd53dddd156b9cb562b0377ea99e9a1e2b5add3f12f"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.417424 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.422504 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-ddcb6" event={"ID":"427e7a1a-b751-4226-9c17-7a8b9226ccc9","Type":"ContainerStarted","Data":"e4f1c2f5f6a7ee3ab9fd71040d7169d62301df8e59017f4557a4d48c1cbb65a7"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.422529 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress/router-default-5444994796-ddcb6" event={"ID":"427e7a1a-b751-4226-9c17-7a8b9226ccc9","Type":"ContainerStarted","Data":"aecfaa65eccd36e0eb83c36ced16c6a76ec1874c73ec3d544dc0c347c5787aea"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.424386 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" event={"ID":"53b84a0d-9a65-471b-af63-19eec862c572","Type":"ContainerStarted","Data":"11e896c659bc0bb9804b4703eab62f8aac38d44a77db09fc790539308e6ff115"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.433537 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" event={"ID":"ddc512fd-9f37-4257-b51b-0d2debe5df0b","Type":"ContainerStarted","Data":"e4efa2a93f286ab65d0eda5a74efff989ab5f5092f3d007d356f62ae52aac088"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.433609 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" event={"ID":"ddc512fd-9f37-4257-b51b-0d2debe5df0b","Type":"ContainerStarted","Data":"21cb421730ee3becaf46f3cd03fc31b74426ebcf32de995cda1b4cf01745b4b2"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.447138 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9hr59" event={"ID":"7b18a264-5694-4065-92fb-a41b3f231a79","Type":"ContainerStarted","Data":"4b055beab539b89b74b570c0cf3a8d1abb0c2ec721ec727bc7c9a129cbd18237"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.447186 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9hr59" event={"ID":"7b18a264-5694-4065-92fb-a41b3f231a79","Type":"ContainerStarted","Data":"6a7f5a2f3662b24f63f69ac2cb7958c7cf8a11c9045e031442eab789db193a63"} Dec 12 14:06:07 crc kubenswrapper[4663]: W1212 14:06:07.447650 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6494cb84_311b_407d_b709_495ff1a2aaba.slice/crio-943164e4293b24af40b67b2798487ec39cd00760e385eb68304fe9aeb5fc1852 WatchSource:0}: Error finding container 943164e4293b24af40b67b2798487ec39cd00760e385eb68304fe9aeb5fc1852: Status 404 returned error can't find the container with id 943164e4293b24af40b67b2798487ec39cd00760e385eb68304fe9aeb5fc1852 Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.461808 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.461911 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.961893383 +0000 UTC m=+147.387311437 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.464722 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.464953 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:07.964945536 +0000 UTC m=+147.390363591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.473657 4663 generic.go:334] "Generic (PLEG): container finished" podID="d9b79bab-ef9e-4304-a599-90d56a93b228" containerID="46ebc6287539efc8ffc2d0aa4126c53e3943a9f5df25fd1f31dba69804129980" exitCode=0 Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.474442 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" event={"ID":"d9b79bab-ef9e-4304-a599-90d56a93b228","Type":"ContainerDied","Data":"46ebc6287539efc8ffc2d0aa4126c53e3943a9f5df25fd1f31dba69804129980"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.474481 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" event={"ID":"d9b79bab-ef9e-4304-a599-90d56a93b228","Type":"ContainerStarted","Data":"e8facc8de3734393b603c3be92217d2825b34e4034cb65a7c870d52d8fe3b52a"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.482589 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" event={"ID":"55f03b2c-5afc-42ea-8e36-f68b358e3f4f","Type":"ContainerStarted","Data":"d350b9c09ba5c8b5ef91820509794a5f1154e3a75d30d4c1012ccacf148cdd36"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.486826 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.490965 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" event={"ID":"2102ea25-a87a-4deb-b2f6-bf54ae31f08f","Type":"ContainerStarted","Data":"3a726fda1880874ee0bcf5315a58ae68b429171aa3facaa140c705c256cb0e56"} Dec 12 14:06:07 
crc kubenswrapper[4663]: I1212 14:06:07.491020 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" event={"ID":"2102ea25-a87a-4deb-b2f6-bf54ae31f08f","Type":"ContainerStarted","Data":"2a53e2012677b1f02b5773c88a16e0f654855e3a89103278ca885e04a1a2ec40"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.492942 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:07 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:07 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:07 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.492979 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.501050 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" event={"ID":"f4ac5e59-3c55-428e-bb44-d96db06fdc78","Type":"ContainerStarted","Data":"cf081e19fb7fed20e1550ceb3f10f74df8034110bce1f50026e198ea8d5bc9f9"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.507821 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" event={"ID":"27da8075-7ceb-4be8-aac2-db2a24030a24","Type":"ContainerStarted","Data":"7195bfd52a68a9bcde135ac1298d2ff12fd7231805474a495d04ad320ef0cab8"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.507914 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" event={"ID":"27da8075-7ceb-4be8-aac2-db2a24030a24","Type":"ContainerStarted","Data":"70f742f06a5fa8b7a0084117ea65e6a2df6e9ca385dc2a7124adb2d55a9d66f7"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.511397 4663 generic.go:334] "Generic (PLEG): container finished" podID="fcf24979-86e7-4a32-af0b-458a7d4df2c2" containerID="8b3d4fa6e38987dd0d54569ffc64555374eb40f3ffb60baaa639d053f5854397" exitCode=0 Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.511523 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" event={"ID":"fcf24979-86e7-4a32-af0b-458a7d4df2c2","Type":"ContainerDied","Data":"8b3d4fa6e38987dd0d54569ffc64555374eb40f3ffb60baaa639d053f5854397"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.511609 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" event={"ID":"fcf24979-86e7-4a32-af0b-458a7d4df2c2","Type":"ContainerStarted","Data":"60a74b1128789529bc1391a8e2bb62c5bd05f896414f24e396cf05df297c63eb"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.519213 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.521916 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" 
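The patch_prober/prober entries above show a startup probe in action: the kubelet issues an HTTP GET against the router's healthz endpoint, treats the 500 response as a failure, and logs the start of the response body (the [-]/[+] check list) for debugging. A minimal, self-contained Go sketch of that probe shape, using a stand-in test server rather than the router's real endpoint:

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "net/http/httptest"
    )

    // probe performs one HTTP check; statuses in [200,400) pass, anything
    // else fails with the body attached, matching the log's output format.
    func probe(url string) error {
        resp, err := http.Get(url)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(resp.Body)
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("HTTP probe failed with statuscode: %d\n%s", resp.StatusCode, body)
        }
        return nil
    }

    func main() {
        // Hypothetical healthz endpoint that fails the same way the
        // router's did while its backends were still syncing.
        srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w.WriteHeader(500)
            fmt.Fprint(w, "[-]backend-http failed: reason withheld\n[-]has-synced failed: reason withheld\n[+]process-running ok\nhealthz check failed\n")
        }))
        defer srv.Close()
        fmt.Println(probe(srv.URL + "/healthz"))
    }

Because this is a startup probe, the failure is expected while the router is still syncing routes; the kubelet simply keeps probing until the check passes or the failure threshold is reached.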
event={"ID":"5c7f3370-d558-4607-a4a3-9fc23d33b8a7","Type":"ContainerStarted","Data":"11667af980f0e27fa170f713f3e50bfba4a2cd9ed76fa56fc7be827335ee7d07"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.526468 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" event={"ID":"fdcb9400-6fea-46b5-ac30-975a690d67c5","Type":"ContainerStarted","Data":"25e9fc1e92ba2d289c40b6dede88bfebe13c7cecbd8996bd708a19a6e67ca972"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.527912 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" event={"ID":"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103","Type":"ContainerStarted","Data":"4c1754ca4ac510c360372577c4abe511580c30e25163795739b1ef58c4c0e082"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.527945 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" event={"ID":"ac4719c1-f4b6-4bd1-bf13-b2fbfecad103","Type":"ContainerStarted","Data":"a7e41d49ef6e73bc82918d363076bd7c69417e72ed705d273494177da1324db7"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.552245 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" event={"ID":"6553ea68-1d18-4953-8e43-09d47f2745fc","Type":"ContainerStarted","Data":"e1d222b72d6662f30d7e3bdabbe465351da1171b09dbb6863bde419b177f4479"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.552288 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" event={"ID":"6553ea68-1d18-4953-8e43-09d47f2745fc","Type":"ContainerStarted","Data":"2359e5276e05f391fd301d93c1f8838ed184a2e0b5cd5631a94d12efa9722eb3"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.552299 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" event={"ID":"6553ea68-1d18-4953-8e43-09d47f2745fc","Type":"ContainerStarted","Data":"195027844ea4c309d22d12a753ee46b0a09a371e7d0c39c4057b3d47d391156b"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.552472 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.556015 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"abc10edeffb61acd3e0bff5af73dd31733301431b521b489ceae818aa3cfe654"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.556149 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d0155f14a5199de339062020c47842e7450b12ca4960d031e5c61dca8323008f"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.559402 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6407fc00f4218eb4df5f5b2aa3a03e68ece0f8bd3c637b73d73e113db74fab39"} Dec 12 14:06:07 crc 
kubenswrapper[4663]: I1212 14:06:07.559522 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"dc752beac774ed069a0861b7c3f566106e7151f7b9867a1077e7c3b0e74c3902"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.575766 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.576725 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.076710996 +0000 UTC m=+147.502129051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.578051 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-gtwt4"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.586824 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.586845 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" event={"ID":"b3f05375-5727-4536-8d9e-541cdb88127a","Type":"ContainerStarted","Data":"d3d96a5d7bb7984387143e22f43ccbdfa53bbcdc08d6b15a0de7aaa89b5ddb2a"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.586871 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" event={"ID":"b3f05375-5727-4536-8d9e-541cdb88127a","Type":"ContainerStarted","Data":"7b1ae78f142a657654543479a7b3fa7d6b7cdfce49c99a160951e2bac2cff2ad"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.641218 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" event={"ID":"990d6886-87e6-4d23-9d63-95c2e2db5525","Type":"ContainerStarted","Data":"94da2ff38c13ad698386540bcfd3bbc10e9b829675828ea84fe9e5d6427cf129"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.648071 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" event={"ID":"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c","Type":"ContainerStarted","Data":"8fe1866877fdf959fb77a5905cf26c2858b42812256d4cf658664cc6101b0900"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.661659 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"212a4e4cdd98abdd1bd0b4103ff6bfbd067f8f232c32be0204338bf04892abf2"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.661707 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"67304794994edef49e9977c28c0dea4525bcfa4b3803d3822fca6860d63ce5ee"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.664058 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:06:07 crc kubenswrapper[4663]: W1212 14:06:07.670866 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7b5011e_2f07_4f46_8597_96fb97e199ec.slice/crio-8c32fe23273fbda0c50dad60cc32950b34a4909b361f392b2f181d6a69a1dc8b WatchSource:0}: Error finding container 8c32fe23273fbda0c50dad60cc32950b34a4909b361f392b2f181d6a69a1dc8b: Status 404 returned error can't find the container with id 8c32fe23273fbda0c50dad60cc32950b34a4909b361f392b2f181d6a69a1dc8b Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.678812 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cvblj"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.679678 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.680389 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" event={"ID":"317dd582-032f-45be-ab3d-30f199ac2261","Type":"ContainerStarted","Data":"76ba34500c6f1889c7630f0d61f8835ceac790628415619959dc1cf5d3025c21"} Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.681143 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.181127531 +0000 UTC m=+147.606545585 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.683429 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.688112 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" event={"ID":"a39261d0-7d6f-4f3a-b741-e476ea306d9a","Type":"ContainerStarted","Data":"f50484a9f039b9b18bca46236d5a7a8ab43fe4690e7f0451bcd0848ed2e0e767"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.698068 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" event={"ID":"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b","Type":"ContainerStarted","Data":"5b3ab79d53b87adab6ff49284629a93618710eea75227d87cbaeac95612d05ac"} Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.727347 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.765143 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.780968 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.782003 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.28198321 +0000 UTC m=+147.707401264 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.820963 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz"] Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.882790 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.882992 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6"] Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.883043 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.383028834 +0000 UTC m=+147.808446888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.931102 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.946655 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-gw8zt" podStartSLOduration=123.946640868 podStartE2EDuration="2m3.946640868s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:07.903986595 +0000 UTC m=+147.329404648" watchObservedRunningTime="2025-12-12 14:06:07.946640868 +0000 UTC m=+147.372058922" Dec 12 14:06:07 crc kubenswrapper[4663]: I1212 14:06:07.983212 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:07 crc kubenswrapper[4663]: E1212 14:06:07.983422 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.483408242 +0000 UTC m=+147.908826296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.084405 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.084805 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.584789795 +0000 UTC m=+148.010207849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.143515 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-qmlvn" podStartSLOduration=125.143501676 podStartE2EDuration="2m5.143501676s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.143125942 +0000 UTC m=+147.568543997" watchObservedRunningTime="2025-12-12 14:06:08.143501676 +0000 UTC m=+147.568919730" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.185913 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.185914 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-ddcb6" podStartSLOduration=124.185898618 podStartE2EDuration="2m4.185898618s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.185474594 +0000 UTC m=+147.610892648" watchObservedRunningTime="2025-12-12 14:06:08.185898618 +0000 UTC m=+147.611316672" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.187453 4663 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.687437879 +0000 UTC m=+148.112855933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.259402 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.274966 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-9hr59" podStartSLOduration=125.274948948 podStartE2EDuration="2m5.274948948s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.274603962 +0000 UTC m=+147.700022016" watchObservedRunningTime="2025-12-12 14:06:08.274948948 +0000 UTC m=+147.700367002" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.280663 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.288271 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.289673 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.789655177 +0000 UTC m=+148.215073231 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.290511 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7krfn"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.338090 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4c6jc" podStartSLOduration=125.338075262 podStartE2EDuration="2m5.338075262s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.310130594 +0000 UTC m=+147.735548648" watchObservedRunningTime="2025-12-12 14:06:08.338075262 +0000 UTC m=+147.763493317" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.360032 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.372092 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t4pt8"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.395796 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.396225 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.896211096 +0000 UTC m=+148.321629149 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: W1212 14:06:08.427523 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e2e4d7d_7756_4f77_9151_fe775e3beb42.slice/crio-6aa2700ba26a090c13e727df3f954a1591a2d5a002f9841f3dfaad2bfd4582b0 WatchSource:0}: Error finding container 6aa2700ba26a090c13e727df3f954a1591a2d5a002f9841f3dfaad2bfd4582b0: Status 404 returned error can't find the container with id 6aa2700ba26a090c13e727df3f954a1591a2d5a002f9841f3dfaad2bfd4582b0 Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.430473 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gxgm9" podStartSLOduration=124.430456609 podStartE2EDuration="2m4.430456609s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.427798464 +0000 UTC m=+147.853216528" watchObservedRunningTime="2025-12-12 14:06:08.430456609 +0000 UTC m=+147.855874663" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.472053 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qttz2"] Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.497349 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:08 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:08 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:08 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.497401 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.498104 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.498385 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:08.998376746 +0000 UTC m=+148.423794801 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: W1212 14:06:08.533816 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod503bcbe4_217b_4060_83e3_b0dae775431d.slice/crio-d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146 WatchSource:0}: Error finding container d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146: Status 404 returned error can't find the container with id d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146 Dec 12 14:06:08 crc kubenswrapper[4663]: W1212 14:06:08.560977 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f64627e_8a01_439b_9401_be61f39a76cc.slice/crio-2203fc717547ac16e0863902d99dae4629fe4e60c0cda4ddd33d4f74a1c6b84c WatchSource:0}: Error finding container 2203fc717547ac16e0863902d99dae4629fe4e60c0cda4ddd33d4f74a1c6b84c: Status 404 returned error can't find the container with id 2203fc717547ac16e0863902d99dae4629fe4e60c0cda4ddd33d4f74a1c6b84c Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.600335 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.600419 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.10039964 +0000 UTC m=+148.525817695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.601121 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.601428 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-12 14:06:09.101421023 +0000 UTC m=+148.526839076 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.708764 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.709210 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.209197636 +0000 UTC m=+148.634615690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.716425 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" podStartSLOduration=124.710525802 podStartE2EDuration="2m4.710525802s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.709331216 +0000 UTC m=+148.134749269" watchObservedRunningTime="2025-12-12 14:06:08.710525802 +0000 UTC m=+148.135943857" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.758469 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-whkx5" podStartSLOduration=125.758452145 podStartE2EDuration="2m5.758452145s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.758068657 +0000 UTC m=+148.183486711" watchObservedRunningTime="2025-12-12 14:06:08.758452145 +0000 UTC m=+148.183870199" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.764125 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" event={"ID":"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e","Type":"ContainerStarted","Data":"95ace74fb8a10ced50f0c70ef6297c7b399d5a17435414925b765f5d4e11bbc5"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.809818 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" 
event={"ID":"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c","Type":"ContainerStarted","Data":"7a4427d5d78e65802573943732d9483b4df83ef81ab5b4532ed308c5583dc971"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.809866 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" event={"ID":"dab80307-f3f3-4fda-b4a7-eb0da7c60a9c","Type":"ContainerStarted","Data":"a742cec245bcc9ba416accb4b98b5bd20764b8dc6a669ea11319920be8741813"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.810222 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.810510 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.310500312 +0000 UTC m=+148.735918365 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.833144 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" event={"ID":"e600b0d3-bfe3-4a65-8331-ae9218a5245f","Type":"ContainerStarted","Data":"87b3a729f029e2867fd640d523b1e09544647338e5a2093f3999fa8e4ce77e07"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.833177 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" event={"ID":"e600b0d3-bfe3-4a65-8331-ae9218a5245f","Type":"ContainerStarted","Data":"f969446e28c64fdf1a9c6e4ac97e097de168e79050e962ed64dfa7f0a3a0c7fb"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.908422 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-lq2p4" podStartSLOduration=124.908351036 podStartE2EDuration="2m4.908351036s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.83783548 +0000 UTC m=+148.263253534" watchObservedRunningTime="2025-12-12 14:06:08.908351036 +0000 UTC m=+148.333769089" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.914193 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:08 crc kubenswrapper[4663]: E1212 14:06:08.915924 4663 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.415903743 +0000 UTC m=+148.841321798 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.919170 4663 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xsrh2 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.919225 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.925050 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.925074 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" event={"ID":"cc90392c-96be-4eda-b4ee-ef73500467e5","Type":"ContainerStarted","Data":"f83588a691678ba3008459a6b2d441a04cef40f7202a8f4938c9e5c9bc835deb"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.925096 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" event={"ID":"0b25f747-6011-4a3b-8058-bb1115a6880e","Type":"ContainerStarted","Data":"3f384b0dd19a2bb370113ec5c9d29629bbed588decb26780b4956c7afc2e8066"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.937346 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" event={"ID":"fcf24979-86e7-4a32-af0b-458a7d4df2c2","Type":"ContainerStarted","Data":"a69e78fdef8f17e528f94905e734dd2bf7f84832ddc82cfff8488bd8f56a83ca"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.977180 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" event={"ID":"a7b5011e-2f07-4f46-8597-96fb97e199ec","Type":"ContainerStarted","Data":"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.977229 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" event={"ID":"a7b5011e-2f07-4f46-8597-96fb97e199ec","Type":"ContainerStarted","Data":"8c32fe23273fbda0c50dad60cc32950b34a4909b361f392b2f181d6a69a1dc8b"} Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.977467 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.983837 4663 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-zsn76 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 12 14:06:08 crc kubenswrapper[4663]: I1212 14:06:08.983875 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.019021 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.019937 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.519925669 +0000 UTC m=+148.945343723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.021247 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" event={"ID":"d9b79bab-ef9e-4304-a599-90d56a93b228","Type":"ContainerStarted","Data":"be3a463ffadebcf2e8260638b8add6030cc7a9c85cabf43c7272a17f01fd9096"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.022969 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" podStartSLOduration=126.022958215 podStartE2EDuration="2m6.022958215s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:08.992284739 +0000 UTC m=+148.417702793" watchObservedRunningTime="2025-12-12 14:06:09.022958215 +0000 UTC m=+148.448376269" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.025264 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.052009 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" 
event={"ID":"f4ac5e59-3c55-428e-bb44-d96db06fdc78","Type":"ContainerStarted","Data":"26c8d8649de5fb36223eb26882aa8417c4694b3b89a7169f73d79933e33523c6"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.052884 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.067379 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qqc4q" podStartSLOduration=126.067364118 podStartE2EDuration="2m6.067364118s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.040351203 +0000 UTC m=+148.465769257" watchObservedRunningTime="2025-12-12 14:06:09.067364118 +0000 UTC m=+148.492782172" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.068213 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" podStartSLOduration=125.06820961 podStartE2EDuration="2m5.06820961s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.067061241 +0000 UTC m=+148.492479295" watchObservedRunningTime="2025-12-12 14:06:09.06820961 +0000 UTC m=+148.493627664" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.073886 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" event={"ID":"6494cb84-311b-407d-b709-495ff1a2aaba","Type":"ContainerStarted","Data":"4a46cbfde3d58179c7ab089c80f0d5cc3f5dcb4158480e5c22cb3df20c958ed5"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.073918 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" event={"ID":"6494cb84-311b-407d-b709-495ff1a2aaba","Type":"ContainerStarted","Data":"943164e4293b24af40b67b2798487ec39cd00760e385eb68304fe9aeb5fc1852"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.077860 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zxqd4" event={"ID":"ac045012-203c-455a-be9d-626711804b7c","Type":"ContainerStarted","Data":"a52cbcce26d09b96853f55f8c5f75d171d05902582696352215430df385fa898"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.080136 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.097126 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" event={"ID":"55f03b2c-5afc-42ea-8e36-f68b358e3f4f","Type":"ContainerStarted","Data":"c9473d8cd58a160019a35f0783a3a8a074b4026aa9ef529dd622bac6f0766510"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.104003 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" event={"ID":"53b84a0d-9a65-471b-af63-19eec862c572","Type":"ContainerStarted","Data":"dcf01bcd5e1ca457c82f9743106098265651145dac2767c2b687aedd3eca962b"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.105025 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.105722 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" podStartSLOduration=126.105699778 podStartE2EDuration="2m6.105699778s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.105172951 +0000 UTC m=+148.530591005" watchObservedRunningTime="2025-12-12 14:06:09.105699778 +0000 UTC m=+148.531117832" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.118005 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" event={"ID":"5e2e4d7d-7756-4f77-9151-fe775e3beb42","Type":"ContainerStarted","Data":"6aa2700ba26a090c13e727df3f954a1591a2d5a002f9841f3dfaad2bfd4582b0"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.122673 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.123988 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.62397172 +0000 UTC m=+149.049389764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.126926 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.132868 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" event={"ID":"fdcb9400-6fea-46b5-ac30-975a690d67c5","Type":"ContainerStarted","Data":"4ce8897147db7f2741deae05fea7ae820ca946067adbf7b952d469f0174ff69d"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.134577 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" podStartSLOduration=125.134568015 podStartE2EDuration="2m5.134568015s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.134013176 +0000 UTC m=+148.559431230" watchObservedRunningTime="2025-12-12 14:06:09.134568015 +0000 UTC m=+148.559986069" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.152445 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" event={"ID":"9ddec9ff-ee94-46f2-a2fb-d6db1c3bfd4b","Type":"ContainerStarted","Data":"a6d35fad123c46514ca687c1e7405b4cecdbf8a3d0634902122b4b749cbf9cf2"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.159961 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" event={"ID":"308ebf9e-6dd5-46a3-a344-176905e7915e","Type":"ContainerStarted","Data":"58e8cbef8d1cfe1f9456b9865103ac368ec3d532d81ec1b5c93a4cfadcf3dfbb"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.160234 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" podStartSLOduration=125.160219521 podStartE2EDuration="2m5.160219521s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.1589641 +0000 UTC m=+148.584382155" watchObservedRunningTime="2025-12-12 14:06:09.160219521 +0000 UTC m=+148.585637575" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.160953 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.167418 4663 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-8g7gt container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.167526 4663 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.174413 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" event={"ID":"5c7f3370-d558-4607-a4a3-9fc23d33b8a7","Type":"ContainerStarted","Data":"2036c3c664703ccb69a0e6a4ecde0503f032ac88ef06bacaba09503a904f5614"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.188359 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lqvwz" podStartSLOduration=125.188347072 podStartE2EDuration="2m5.188347072s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.184910649 +0000 UTC m=+148.610328704" watchObservedRunningTime="2025-12-12 14:06:09.188347072 +0000 UTC m=+148.613765126" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.189351 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7krfn" event={"ID":"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0","Type":"ContainerStarted","Data":"ea97b9d8c7474abed08cfe7ca13db33c046b8520c7f4bc25b352a9417254bb11"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.204104 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" event={"ID":"22b1ed4b-7956-4b80-8054-51a85f130dde","Type":"ContainerStarted","Data":"3ebf60d3d56d4f1b94d22b735ee46152d2037b8490d1ba9a0eaeec3740c60d42"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.217226 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" event={"ID":"fcc5eb52-de54-4535-9c8a-5158b76d54a5","Type":"ContainerStarted","Data":"7c943ed7b53ba3dcb4603d073facfb5abbb200119d4903f21e85847122eab4c3"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.217673 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.220147 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4fxpg" event={"ID":"adfbdf33-01b1-454d-bb33-b32818244bc2","Type":"ContainerStarted","Data":"2d6355621f4ae29834255877d8adf2ef6fbb6c5e7bdf09d037ee7ef146b75cf2"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.220187 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4fxpg" event={"ID":"adfbdf33-01b1-454d-bb33-b32818244bc2","Type":"ContainerStarted","Data":"5fda32e0b101969675b6cd60230cc65ac0a49444a2085c1d6be05c3cdbd84c28"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.220780 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.223775 4663 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fxpg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": 
dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.223873 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fxpg" podUID="adfbdf33-01b1-454d-bb33-b32818244bc2" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.223817 4663 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-67sg6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" start-of-body= Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.224030 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" podUID="fcc5eb52-de54-4535-9c8a-5158b76d54a5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.224626 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.225703 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.725689442 +0000 UTC m=+149.151107496 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.227049 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t4pt8" event={"ID":"7f64627e-8a01-439b-9401-be61f39a76cc","Type":"ContainerStarted","Data":"2203fc717547ac16e0863902d99dae4629fe4e60c0cda4ddd33d4f74a1c6b84c"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.230263 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" event={"ID":"503bcbe4-217b-4060-83e3-b0dae775431d","Type":"ContainerStarted","Data":"d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.233790 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" event={"ID":"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d","Type":"ContainerStarted","Data":"cf111b2ab33c0525c30e833c0e964e91db4c58159611877a622090ce02d17f50"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.233822 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" event={"ID":"7498af08-fd9e-4439-a0d7-5e5a8d5cc92d","Type":"ContainerStarted","Data":"118b8639f1a1a4a37d06a79cac9f473f330159376efd0a995db51de4c5b43f29"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.239408 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z9g5q" podStartSLOduration=125.239398404 podStartE2EDuration="2m5.239398404s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.236377449 +0000 UTC m=+148.661795503" watchObservedRunningTime="2025-12-12 14:06:09.239398404 +0000 UTC m=+148.664816458" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.274008 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f4vfm" event={"ID":"6ac5aa32-7726-427e-99ee-19b74838d5b6","Type":"ContainerStarted","Data":"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.274066 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f4vfm" event={"ID":"6ac5aa32-7726-427e-99ee-19b74838d5b6","Type":"ContainerStarted","Data":"e37d9b904cb1ea9ba71b6eef5d19f8336e0fb612a361e2fccf6b4ce4b49d2169"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.303674 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" event={"ID":"890f4608-3a85-4bae-be47-f35c7f268a29","Type":"ContainerStarted","Data":"5b6035da8cfa9311ec4156ffcb0c8a490c50202f9293091a3f5fdd039b8bd495"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.308737 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="hostpath-provisioner/csi-hostpathplugin-qttz2" event={"ID":"33e2041b-8ae7-4c04-bf4f-3abce29de30d","Type":"ContainerStarted","Data":"363efd6847e4cbe6d5c4ff5a7cb0102a9d8373dff3c793384105ac782848d212"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.311399 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" event={"ID":"43fb274b-4188-4db5-a4f0-f7c18bca433e","Type":"ContainerStarted","Data":"8f2f2b7cdf78ad453b3e875b466d9bf741416de012e4a2f0836bb12b692f561e"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.311429 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" event={"ID":"43fb274b-4188-4db5-a4f0-f7c18bca433e","Type":"ContainerStarted","Data":"b32c997d978faf30825bfe09510009ec3aa557efa080816fe99e55af18f0e0d5"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.325693 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.326696 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.826682848 +0000 UTC m=+149.252100902 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.335435 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" podStartSLOduration=126.335421717 podStartE2EDuration="2m6.335421717s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.274074063 +0000 UTC m=+148.699492116" watchObservedRunningTime="2025-12-12 14:06:09.335421717 +0000 UTC m=+148.760839771" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.337636 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" event={"ID":"ddc512fd-9f37-4257-b51b-0d2debe5df0b","Type":"ContainerStarted","Data":"4d55d000fe7bc38a2ed9c55735669b285bc137afbf7aab666e5b7ed54e8829d3"} Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.342158 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.358225 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-49pbd" podStartSLOduration=125.358207879 podStartE2EDuration="2m5.358207879s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.336543497 +0000 UTC m=+148.761961551" watchObservedRunningTime="2025-12-12 14:06:09.358207879 +0000 UTC m=+148.783625934" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.361439 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-9hr59" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.422853 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bc9bd" podStartSLOduration=125.422835995 podStartE2EDuration="2m5.422835995s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.361184372 +0000 UTC m=+148.786602426" watchObservedRunningTime="2025-12-12 14:06:09.422835995 +0000 UTC m=+148.848254129" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.438093 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.443097 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" podStartSLOduration=125.443080881 podStartE2EDuration="2m5.443080881s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.423503003 +0000 UTC m=+148.848921058" watchObservedRunningTime="2025-12-12 14:06:09.443080881 +0000 UTC m=+148.868498936" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.443577 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wpq62" podStartSLOduration=125.443573103 podStartE2EDuration="2m5.443573103s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.440685446 +0000 UTC m=+148.866103501" watchObservedRunningTime="2025-12-12 14:06:09.443573103 +0000 UTC m=+148.868991156" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.448352 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:09.948337862 +0000 UTC m=+149.373755916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.492281 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:09 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:09 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:09 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.492554 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.509607 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-67kxr" podStartSLOduration=126.509585669 podStartE2EDuration="2m6.509585669s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.509138132 +0000 UTC m=+148.934556186" watchObservedRunningTime="2025-12-12 14:06:09.509585669 +0000 UTC m=+148.935003723" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.510379 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zcz24" podStartSLOduration=125.510375268 podStartE2EDuration="2m5.510375268s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.479620318 +0000 UTC m=+148.905038372" watchObservedRunningTime="2025-12-12 14:06:09.510375268 +0000 UTC m=+148.935793321" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.540883 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.541149 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.041136328 +0000 UTC m=+149.466554382 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.592614 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" podStartSLOduration=125.592547053 podStartE2EDuration="2m5.592547053s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.55704635 +0000 UTC m=+148.982464404" watchObservedRunningTime="2025-12-12 14:06:09.592547053 +0000 UTC m=+149.017965107" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.592790 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" podStartSLOduration=126.59278617 podStartE2EDuration="2m6.59278617s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.587938145 +0000 UTC m=+149.013356200" watchObservedRunningTime="2025-12-12 14:06:09.59278617 +0000 UTC m=+149.018204224" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.640137 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-zxqd4" podStartSLOduration=5.640120363 podStartE2EDuration="5.640120363s" podCreationTimestamp="2025-12-12 14:06:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.639239766 +0000 UTC m=+149.064657819" watchObservedRunningTime="2025-12-12 14:06:09.640120363 +0000 UTC m=+149.065538418" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.643335 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.643616 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.143605388 +0000 UTC m=+149.569023442 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.675720 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4fxpg" podStartSLOduration=126.675707418 podStartE2EDuration="2m6.675707418s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.673629509 +0000 UTC m=+149.099047563" watchObservedRunningTime="2025-12-12 14:06:09.675707418 +0000 UTC m=+149.101125472" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.709049 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-t5mzq" podStartSLOduration=125.709031817 podStartE2EDuration="2m5.709031817s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.70702498 +0000 UTC m=+149.132443034" watchObservedRunningTime="2025-12-12 14:06:09.709031817 +0000 UTC m=+149.134449871" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.716055 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.716843 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.734317 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.736131 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.745050 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.745200 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q8d4\" (UniqueName: \"kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.745261 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.745289 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.745387 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.245374686 +0000 UTC m=+149.670792740 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.808894 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v2jqd" podStartSLOduration=125.808882455 podStartE2EDuration="2m5.808882455s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.806193722 +0000 UTC m=+149.231611776" watchObservedRunningTime="2025-12-12 14:06:09.808882455 +0000 UTC m=+149.234300499" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.846400 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.846445 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.846478 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.846510 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q8d4\" (UniqueName: \"kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.846997 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.847195 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.847384 4663 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.347374136 +0000 UTC m=+149.772792190 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.877326 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qrzws" podStartSLOduration=125.877313248 podStartE2EDuration="2m5.877313248s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:09.87613374 +0000 UTC m=+149.301551794" watchObservedRunningTime="2025-12-12 14:06:09.877313248 +0000 UTC m=+149.302731302" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.903513 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q8d4\" (UniqueName: \"kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4\") pod \"certified-operators-82xhz\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.933110 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.933858 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.940948 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.948364 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.948487 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bxsb\" (UniqueName: \"kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.948573 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.948594 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:09 crc kubenswrapper[4663]: E1212 14:06:09.948713 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.448701307 +0000 UTC m=+149.874119361 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:09 crc kubenswrapper[4663]: I1212 14:06:09.967774 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.036317 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.047772 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-f4vfm" podStartSLOduration=127.047759432 podStartE2EDuration="2m7.047759432s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.016699782 +0000 UTC m=+149.442117836" watchObservedRunningTime="2025-12-12 14:06:10.047759432 +0000 UTC m=+149.473177486" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.049654 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.049691 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.049792 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.049827 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bxsb\" (UniqueName: \"kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.050372 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.050589 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.050833 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.550821223 +0000 UTC m=+149.976239277 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.093587 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bxsb\" (UniqueName: \"kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb\") pod \"community-operators-8gc87\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.136185 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.136923 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.150703 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.150914 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.151060 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.151140 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.151186 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fsc4\" (UniqueName: \"kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.151303 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.651273748 +0000 UTC m=+150.076691801 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.252390 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.252449 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.252470 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.252506 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fsc4\" (UniqueName: \"kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.252828 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.752812905 +0000 UTC m=+150.178230949 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.253094 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.253145 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.282350 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fsc4\" (UniqueName: \"kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4\") pod \"certified-operators-klhsb\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.283627 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.313512 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qz6sl"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.314437 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.333098 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qz6sl"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.353762 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.356653 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.356727 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.356825 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8txm\" (UniqueName: \"kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.365645 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" event={"ID":"fcc5eb52-de54-4535-9c8a-5158b76d54a5","Type":"ContainerStarted","Data":"4edc035b70a580317ea0abcbfd9c53a88f6afb5f88befb1ef110679ac5e82c47"} Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.367525 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.867499615 +0000 UTC m=+150.292917668 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.412253 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" event={"ID":"503bcbe4-217b-4060-83e3-b0dae775431d","Type":"ContainerStarted","Data":"ac96afdc0a6a4855a7d0b9d5a2db5dfb30af17f534353e92f0ccfa36a1a0ed9e"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.434562 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-j724k" event={"ID":"5c7f3370-d558-4607-a4a3-9fc23d33b8a7","Type":"ContainerStarted","Data":"124773a564d3a80d4cae0a4093689346e826d7c022d553f2f8f7c781948b2ad8"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.436227 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" event={"ID":"308ebf9e-6dd5-46a3-a344-176905e7915e","Type":"ContainerStarted","Data":"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.444971 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.448508 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" event={"ID":"cc90392c-96be-4eda-b4ee-ef73500467e5","Type":"ContainerStarted","Data":"e3d7e32e2b0687aba6da1e1bd7010f55bd38b32fe3f6ec2abd9736767e502ea7"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.448560 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" event={"ID":"cc90392c-96be-4eda-b4ee-ef73500467e5","Type":"ContainerStarted","Data":"c75422d28384be05c862e958194e59c11929ba01c384275c8f6f407b4634e791"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.455606 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.459173 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8txm\" (UniqueName: \"kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.459253 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.459291 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.459344 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.461023 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:10.961014672 +0000 UTC m=+150.386432726 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.461633 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.462464 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.468716 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474834 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t4pt8" event={"ID":"7f64627e-8a01-439b-9401-be61f39a76cc","Type":"ContainerStarted","Data":"8acc7b9ffb4bb8b8a65af32c754077391f11e4f29b68dfcb44e4b96c01b789b9"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474870 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t4pt8" event={"ID":"7f64627e-8a01-439b-9401-be61f39a76cc","Type":"ContainerStarted","Data":"0946c36e8ce4cff7adf07e8a4d1fe330069029e482015f8e12062b825091f8b0"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474890 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474901 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" event={"ID":"33e2041b-8ae7-4c04-bf4f-3abce29de30d","Type":"ContainerStarted","Data":"561b32058704e920ba8bcf475b2cb705af3a9259696ff65f8fa199b8315d27fa"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474913 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" event={"ID":"22b1ed4b-7956-4b80-8054-51a85f130dde","Type":"ContainerStarted","Data":"02816dc1f3c3eb7a965b8c01dc06262bea4c7b43700bf4bf7b4f80ff35a63b2f"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474923 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" event={"ID":"22b1ed4b-7956-4b80-8054-51a85f130dde","Type":"ContainerStarted","Data":"ad2e09a3fd9fa3c36b293aa1d37345b05adc8a2beead374230f42183b7861ca9"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.474933 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zxqd4" event={"ID":"ac045012-203c-455a-be9d-626711804b7c","Type":"ContainerStarted","Data":"05d48cebd4e843884785cabd52856a296848baaad60351c1443f0d793c62cc8f"} Dec 12 
14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.475367 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" event={"ID":"5e2e4d7d-7756-4f77-9151-fe775e3beb42","Type":"ContainerStarted","Data":"e566a86dec088923e37fd33d9f4c952ef98e27b671910271e2124a3365b58418"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.475392 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" event={"ID":"5e2e4d7d-7756-4f77-9151-fe775e3beb42","Type":"ContainerStarted","Data":"56f1192dc5ec8b0ae4f781ab600b25fe4e748a37c63a0710dc82d11b50e1d7ba"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.497859 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" event={"ID":"55f03b2c-5afc-42ea-8e36-f68b358e3f4f","Type":"ContainerStarted","Data":"e2932be56fb93cd6824e06262a071a237a59ffbd370bf8f71c80672bd3ddff9b"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.499758 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-bk68t" podStartSLOduration=126.499733889 podStartE2EDuration="2m6.499733889s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.497494738 +0000 UTC m=+149.922912792" watchObservedRunningTime="2025-12-12 14:06:10.499733889 +0000 UTC m=+149.925151944" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.502800 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-67sg6" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.502909 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8txm\" (UniqueName: \"kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm\") pod \"community-operators-qz6sl\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.517988 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7krfn" event={"ID":"bf0d6d3e-ccb4-4cef-a1ab-9977254d9ca0","Type":"ContainerStarted","Data":"1815249804e5c7fa426c1ec26db2624cd335312c0ddfba9af056f15c87c7bec9"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.518137 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:10 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:10 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:10 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.518342 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.523731 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kqvdh" podStartSLOduration=127.523705711 podStartE2EDuration="2m7.523705711s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.523618089 +0000 UTC m=+149.949036142" watchObservedRunningTime="2025-12-12 14:06:10.523705711 +0000 UTC m=+149.949123766" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.550402 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-t4pt8" podStartSLOduration=6.550388118 podStartE2EDuration="6.550388118s" podCreationTimestamp="2025-12-12 14:06:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.549253815 +0000 UTC m=+149.974671869" watchObservedRunningTime="2025-12-12 14:06:10.550388118 +0000 UTC m=+149.975806172" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.560315 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" event={"ID":"d88714bd-dbc4-45b7-ac3a-d9a5175d8e9e","Type":"ContainerStarted","Data":"3ea66fdfa37856d351c3b814e98aaef29cd9458ae196564022b8d26a97abf137"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.561060 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.561962 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.061950871 +0000 UTC m=+150.487368925 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.571431 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" event={"ID":"0b25f747-6011-4a3b-8058-bb1115a6880e","Type":"ContainerStarted","Data":"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.572241 4663 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xsrh2 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.572271 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.586101 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k85tq" podStartSLOduration=126.586089386 podStartE2EDuration="2m6.586089386s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.572034307 +0000 UTC m=+149.997452361" watchObservedRunningTime="2025-12-12 14:06:10.586089386 +0000 UTC m=+150.011507439" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.606387 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-7krfn" podStartSLOduration=6.606374478 podStartE2EDuration="6.606374478s" podCreationTimestamp="2025-12-12 14:06:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.605928322 +0000 UTC m=+150.031346376" watchObservedRunningTime="2025-12-12 14:06:10.606374478 +0000 UTC m=+150.031792531" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.613322 4663 generic.go:334] "Generic (PLEG): container finished" podID="890f4608-3a85-4bae-be47-f35c7f268a29" containerID="93edd16bc393e6026ec7ecbb511963f1120b6acda0a86e4a3b00d7967f34ed84" exitCode=0 Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.614356 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" event={"ID":"890f4608-3a85-4bae-be47-f35c7f268a29","Type":"ContainerStarted","Data":"36314c71175c1cbd6182cefd1e2149319825c5027b7e78783a7469351143a4fb"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.614384 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" 
event={"ID":"890f4608-3a85-4bae-be47-f35c7f268a29","Type":"ContainerStarted","Data":"3fec6ff5c75a66d9087f667a953e5e5384f0cb2ce514ad7e54d1ca185eb97a3c"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.614394 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" event={"ID":"890f4608-3a85-4bae-be47-f35c7f268a29","Type":"ContainerDied","Data":"93edd16bc393e6026ec7ecbb511963f1120b6acda0a86e4a3b00d7967f34ed84"} Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.615145 4663 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fxpg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.615192 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fxpg" podUID="adfbdf33-01b1-454d-bb33-b32818244bc2" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.620908 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.663552 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.665352 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.165340905 +0000 UTC m=+150.590758959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.673294 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.697453 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-gtwt4" podStartSLOduration=126.697439379 podStartE2EDuration="2m6.697439379s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.65452026 +0000 UTC m=+150.079938314" watchObservedRunningTime="2025-12-12 14:06:10.697439379 +0000 UTC m=+150.122857433" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.736036 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-zzw52" podStartSLOduration=126.736020788 podStartE2EDuration="2m6.736020788s" podCreationTimestamp="2025-12-12 14:04:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.697702611 +0000 UTC m=+150.123120666" watchObservedRunningTime="2025-12-12 14:06:10.736020788 +0000 UTC m=+150.161438842" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.765117 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.765272 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.265255883 +0000 UTC m=+150.690673937 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.765475 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.765771 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.265763693 +0000 UTC m=+150.691181748 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.847274 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" podStartSLOduration=127.847259232 podStartE2EDuration="2m7.847259232s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:10.80106874 +0000 UTC m=+150.226486794" watchObservedRunningTime="2025-12-12 14:06:10.847259232 +0000 UTC m=+150.272677287" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.868586 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.868851 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.368837915 +0000 UTC m=+150.794255969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.901760 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.934820 4663 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.972376 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:10 crc kubenswrapper[4663]: E1212 14:06:10.972717 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-12 14:06:11.472706343 +0000 UTC m=+150.898124397 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ns242" (UID: "191340bd-661f-4987-b348-20887c35f540") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 12 14:06:10 crc kubenswrapper[4663]: I1212 14:06:10.975349 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.022877 4663 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-12T14:06:10.934840754Z","Handler":null,"Name":""} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.027934 4663 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.027954 4663 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.034332 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.034839 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.045466 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.073415 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.087258 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.168367 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.179669 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.202618 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qz6sl"] Dec 12 14:06:11 crc kubenswrapper[4663]: W1212 14:06:11.205896 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd11b55a2_e4a0_47e6_a9a9_5b935aabfe4a.slice/crio-45d46f4f2050565e7f43f933260ebadcce90d86945a6917bf014666620c65b3c WatchSource:0}: Error finding container 45d46f4f2050565e7f43f933260ebadcce90d86945a6917bf014666620c65b3c: Status 404 returned error can't find the container with id 45d46f4f2050565e7f43f933260ebadcce90d86945a6917bf014666620c65b3c Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.242455 4663 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.242494 4663 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.321877 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ns242\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") " pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.490666 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:11 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:11 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:11 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.490711 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:11 crc kubenswrapper[4663]: E1212 14:06:11.507720 4663 
cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd11b55a2_e4a0_47e6_a9a9_5b935aabfe4a.slice/crio-conmon-652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5.scope\": RecentStats: unable to find data in memory cache]" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.595610 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.617609 4663 generic.go:334] "Generic (PLEG): container finished" podID="503bcbe4-217b-4060-83e3-b0dae775431d" containerID="ac96afdc0a6a4855a7d0b9d5a2db5dfb30af17f534353e92f0ccfa36a1a0ed9e" exitCode=0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.617913 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" event={"ID":"503bcbe4-217b-4060-83e3-b0dae775431d","Type":"ContainerDied","Data":"ac96afdc0a6a4855a7d0b9d5a2db5dfb30af17f534353e92f0ccfa36a1a0ed9e"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.629433 4663 generic.go:334] "Generic (PLEG): container finished" podID="6fc61e95-424c-4039-92c6-2a07590312d6" containerID="31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df" exitCode=0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.629526 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerDied","Data":"31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.629568 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerStarted","Data":"a52e2f27500ef68a72f3c1d503cfb1e06051c806782ee89ae29696eaceefddbf"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.633801 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.644054 4663 generic.go:334] "Generic (PLEG): container finished" podID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerID="a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521" exitCode=0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.644113 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerDied","Data":"a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.644134 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerStarted","Data":"30881c4e9a9b4f3b59b46c29edcea1a21ef5bdac00b72a510a05e6d15d961fc0"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.659136 4663 generic.go:334] "Generic (PLEG): container finished" podID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerID="652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5" exitCode=0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.659202 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" 
event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerDied","Data":"652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.659223 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerStarted","Data":"45d46f4f2050565e7f43f933260ebadcce90d86945a6917bf014666620c65b3c"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.681634 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" event={"ID":"33e2041b-8ae7-4c04-bf4f-3abce29de30d","Type":"ContainerStarted","Data":"50cf7b51469fc8199684f43ed5bff93510270fc928c8c97478f6ce754a4c8082"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.681673 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" event={"ID":"33e2041b-8ae7-4c04-bf4f-3abce29de30d","Type":"ContainerStarted","Data":"c19850c934204cc0340458d974d2a92e7ae17fe92c23b15f91f8428a066cccb5"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.695343 4663 generic.go:334] "Generic (PLEG): container finished" podID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerID="af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3" exitCode=0 Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.695467 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8gc87" event={"ID":"0620a18b-0f89-46ff-b112-0b28ee4d7d5a","Type":"ContainerDied","Data":"af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.696108 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8gc87" event={"ID":"0620a18b-0f89-46ff-b112-0b28ee4d7d5a","Type":"ContainerStarted","Data":"dbf9fcbf90933c89cf4a078821ee95da658d3f4ac217f2155be6bbd384924b28"} Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.699419 4663 patch_prober.go:28] interesting pod/downloads-7954f5f757-4fxpg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.699468 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4fxpg" podUID="adfbdf33-01b1-454d-bb33-b32818244bc2" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.703481 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7wb5x" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.707146 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vhb2f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.713169 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.714111 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.715961 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.719317 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.726547 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.795267 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.795412 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.795681 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkwfk\" (UniqueName: \"kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.864215 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" podStartSLOduration=8.864201518 podStartE2EDuration="8.864201518s" podCreationTimestamp="2025-12-12 14:06:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:11.863691873 +0000 UTC m=+151.289109927" watchObservedRunningTime="2025-12-12 14:06:11.864201518 +0000 UTC m=+151.289619622" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.896876 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkwfk\" (UniqueName: \"kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.896951 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.896981 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content\") pod \"redhat-marketplace-ppb9f\" (UID: 
\"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.897870 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.898254 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.921674 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkwfk\" (UniqueName: \"kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk\") pod \"redhat-marketplace-ppb9f\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:11 crc kubenswrapper[4663]: I1212 14:06:11.970095 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"] Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.026107 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.026163 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.028015 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.105582 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"] Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.106375 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.115951 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"] Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.200580 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.200847 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.200884 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmbtj\" (UniqueName: \"kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.275189 4663 patch_prober.go:28] interesting pod/apiserver-76f77b778f-cvblj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]log ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]etcd ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/generic-apiserver-start-informers ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/max-in-flight-filter ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 12 14:06:12 crc kubenswrapper[4663]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 12 14:06:12 crc kubenswrapper[4663]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/project.openshift.io-projectcache ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/openshift.io-startinformers ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 12 14:06:12 crc kubenswrapper[4663]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 12 14:06:12 crc kubenswrapper[4663]: livez check failed Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.275239 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" podUID="890f4608-3a85-4bae-be47-f35c7f268a29" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.290188 4663 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:06:12 crc kubenswrapper[4663]: W1212 14:06:12.295547 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7742e8b6_05a7_4a11_b883_7e5488fba6d9.slice/crio-482e4c89cd8eb6a06d25810cc94ca480d4d3d638454463d6b5e8aafed341fb50 WatchSource:0}: Error finding container 482e4c89cd8eb6a06d25810cc94ca480d4d3d638454463d6b5e8aafed341fb50: Status 404 returned error can't find the container with id 482e4c89cd8eb6a06d25810cc94ca480d4d3d638454463d6b5e8aafed341fb50 Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.302259 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.302301 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmbtj\" (UniqueName: \"kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.302374 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.302706 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.302736 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.322540 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmbtj\" (UniqueName: \"kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj\") pod \"redhat-marketplace-b955r\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") " pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.437237 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.488670 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:12 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:12 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:12 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.488715 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.579876 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"] Dec 12 14:06:12 crc kubenswrapper[4663]: W1212 14:06:12.587082 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod658c01a0_8350_4105_9f87_db732c74faed.slice/crio-dac489de5820e120d91bafc3019079f980ae1aceb42d00157321220af4077e91 WatchSource:0}: Error finding container dac489de5820e120d91bafc3019079f980ae1aceb42d00157321220af4077e91: Status 404 returned error can't find the container with id dac489de5820e120d91bafc3019079f980ae1aceb42d00157321220af4077e91 Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.701895 4663 generic.go:334] "Generic (PLEG): container finished" podID="658c01a0-8350-4105-9f87-db732c74faed" containerID="d84a2c7993fb748891e91404ababb5e12d26ac1de1c80815e9acb9cbc7630bbb" exitCode=0 Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.701958 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerDied","Data":"d84a2c7993fb748891e91404ababb5e12d26ac1de1c80815e9acb9cbc7630bbb"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.701983 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerStarted","Data":"dac489de5820e120d91bafc3019079f980ae1aceb42d00157321220af4077e91"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.707276 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qttz2" event={"ID":"33e2041b-8ae7-4c04-bf4f-3abce29de30d","Type":"ContainerStarted","Data":"5b9cce5a2e5f7d2f0031486edac45258c0c28ccfe1a7bb1ddfc55f724a3a2e0e"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.710143 4663 generic.go:334] "Generic (PLEG): container finished" podID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerID="3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a" exitCode=0 Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.710512 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerDied","Data":"3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.710566 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerStarted","Data":"482e4c89cd8eb6a06d25810cc94ca480d4d3d638454463d6b5e8aafed341fb50"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.720187 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" event={"ID":"191340bd-661f-4987-b348-20887c35f540","Type":"ContainerStarted","Data":"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.720219 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" event={"ID":"191340bd-661f-4987-b348-20887c35f540","Type":"ContainerStarted","Data":"8aa6692b218cebfd63748b4934c162028e95b4b079adc7925f49809bae16ace6"} Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.720441 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.757447 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" podStartSLOduration=129.757434154 podStartE2EDuration="2m9.757434154s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:12.75446213 +0000 UTC m=+152.179880184" watchObservedRunningTime="2025-12-12 14:06:12.757434154 +0000 UTC m=+152.182852209" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.878639 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.888267 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.919101 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume\") pod \"503bcbe4-217b-4060-83e3-b0dae775431d\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.919152 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-982qs\" (UniqueName: \"kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs\") pod \"503bcbe4-217b-4060-83e3-b0dae775431d\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.919426 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume\") pod \"503bcbe4-217b-4060-83e3-b0dae775431d\" (UID: \"503bcbe4-217b-4060-83e3-b0dae775431d\") " Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.922876 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume" (OuterVolumeSpecName: "config-volume") pod "503bcbe4-217b-4060-83e3-b0dae775431d" (UID: "503bcbe4-217b-4060-83e3-b0dae775431d"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.924445 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:06:12 crc kubenswrapper[4663]: E1212 14:06:12.924735 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="503bcbe4-217b-4060-83e3-b0dae775431d" containerName="collect-profiles" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.924765 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="503bcbe4-217b-4060-83e3-b0dae775431d" containerName="collect-profiles" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.925589 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="503bcbe4-217b-4060-83e3-b0dae775431d" containerName="collect-profiles" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.927134 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.928310 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "503bcbe4-217b-4060-83e3-b0dae775431d" (UID: "503bcbe4-217b-4060-83e3-b0dae775431d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.928645 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.931592 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:06:12 crc kubenswrapper[4663]: I1212 14:06:12.932988 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs" (OuterVolumeSpecName: "kube-api-access-982qs") pod "503bcbe4-217b-4060-83e3-b0dae775431d" (UID: "503bcbe4-217b-4060-83e3-b0dae775431d"). InnerVolumeSpecName "kube-api-access-982qs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020474 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqnmg\" (UniqueName: \"kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020601 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020634 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020674 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/503bcbe4-217b-4060-83e3-b0dae775431d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020686 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-982qs\" (UniqueName: \"kubernetes.io/projected/503bcbe4-217b-4060-83e3-b0dae775431d-kube-api-access-982qs\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.020695 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/503bcbe4-217b-4060-83e3-b0dae775431d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.124383 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.124429 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.124466 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqnmg\" (UniqueName: \"kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.124993 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities\") pod \"redhat-operators-zlg8l\" (UID: 
\"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.125019 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.141186 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqnmg\" (UniqueName: \"kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg\") pod \"redhat-operators-zlg8l\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.242381 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.311446 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"] Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.312495 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.319234 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"] Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.329543 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.329601 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.329626 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-689cd\" (UniqueName: \"kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.431467 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.431547 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-689cd\" (UniqueName: \"kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd\") pod \"redhat-operators-lsccb\" (UID: 
\"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.431652 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.432200 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.432602 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.451405 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-689cd\" (UniqueName: \"kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd\") pod \"redhat-operators-lsccb\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") " pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.489424 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:13 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:13 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:13 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.489462 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.636787 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.677309 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:06:13 crc kubenswrapper[4663]: W1212 14:06:13.682918 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34cf2033_292d_493b_803a_d0171fae07d3.slice/crio-c5ca4f380eb5c0c407bda10d960878c84bfc1ae58fe92e29805b3d4d22a714b6 WatchSource:0}: Error finding container c5ca4f380eb5c0c407bda10d960878c84bfc1ae58fe92e29805b3d4d22a714b6: Status 404 returned error can't find the container with id c5ca4f380eb5c0c407bda10d960878c84bfc1ae58fe92e29805b3d4d22a714b6 Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.731602 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" event={"ID":"503bcbe4-217b-4060-83e3-b0dae775431d","Type":"ContainerDied","Data":"d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146"} Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.731635 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d550dcf9e2bfef978a6b98755cb390f7d6755870574f105c2ed78e0f157f6146" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.731690 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425800-bgfn9" Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.737123 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerStarted","Data":"c5ca4f380eb5c0c407bda10d960878c84bfc1ae58fe92e29805b3d4d22a714b6"} Dec 12 14:06:13 crc kubenswrapper[4663]: I1212 14:06:13.820080 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"] Dec 12 14:06:13 crc kubenswrapper[4663]: W1212 14:06:13.825682 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe66ebb4_36ec_4c3d_8658_3b014a710cfa.slice/crio-355d38a0366a99f842d4ee4bd0086b2d54631303660f837a9b1853ce43f7243b WatchSource:0}: Error finding container 355d38a0366a99f842d4ee4bd0086b2d54631303660f837a9b1853ce43f7243b: Status 404 returned error can't find the container with id 355d38a0366a99f842d4ee4bd0086b2d54631303660f837a9b1853ce43f7243b Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.059019 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.060911 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.063852 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.065625 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.066774 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.238653 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.238729 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.339486 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.339560 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.339616 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.365596 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.377411 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.490068 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:14 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:14 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:14 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.490262 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.754741 4663 generic.go:334] "Generic (PLEG): container finished" podID="34cf2033-292d-493b-803a-d0171fae07d3" containerID="d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9" exitCode=0 Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.754852 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerDied","Data":"d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9"} Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.756165 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.761525 4663 generic.go:334] "Generic (PLEG): container finished" podID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerID="ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea" exitCode=0 Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.761605 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerDied","Data":"ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea"} Dec 12 14:06:14 crc kubenswrapper[4663]: I1212 14:06:14.761635 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerStarted","Data":"355d38a0366a99f842d4ee4bd0086b2d54631303660f837a9b1853ce43f7243b"} Dec 12 14:06:15 crc kubenswrapper[4663]: I1212 14:06:15.488587 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:15 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:15 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:15 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:15 crc kubenswrapper[4663]: I1212 14:06:15.488819 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:15 crc kubenswrapper[4663]: I1212 14:06:15.770262 4663 generic.go:334] "Generic (PLEG): container finished" 
podID="db681d0f-c07e-45f7-8a45-b83de52065cb" containerID="092437132d0a340009da68f6362cc147d868f5f42c2a9ad4f07e2a7d53180977" exitCode=0 Dec 12 14:06:15 crc kubenswrapper[4663]: I1212 14:06:15.770304 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"db681d0f-c07e-45f7-8a45-b83de52065cb","Type":"ContainerDied","Data":"092437132d0a340009da68f6362cc147d868f5f42c2a9ad4f07e2a7d53180977"} Dec 12 14:06:15 crc kubenswrapper[4663]: I1212 14:06:15.770343 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"db681d0f-c07e-45f7-8a45-b83de52065cb","Type":"ContainerStarted","Data":"73722bcb8abc7798141fafbe205f0ced45ccd23182b52691bffa25914f0125ee"} Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.486684 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.489054 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:16 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:16 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:16 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.489110 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.750836 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.751324 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.758528 4663 patch_prober.go:28] interesting pod/console-f9d7485db-f4vfm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.24:8443/health\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.758625 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-f4vfm" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.24:8443/health\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 12 14:06:16 crc kubenswrapper[4663]: I1212 14:06:16.760853 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4fxpg" Dec 12 14:06:17 crc kubenswrapper[4663]: I1212 14:06:17.030650 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:17 crc kubenswrapper[4663]: I1212 14:06:17.035278 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-cvblj" Dec 12 14:06:17 crc kubenswrapper[4663]: I1212 14:06:17.491045 4663 patch_prober.go:28] interesting 
Dec 12 14:06:17 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld
Dec 12 14:06:17 crc kubenswrapper[4663]: [+]process-running ok
Dec 12 14:06:17 crc kubenswrapper[4663]: healthz check failed
Dec 12 14:06:17 crc kubenswrapper[4663]: I1212 14:06:17.491097 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.093228 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.098384 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access\") pod \"db681d0f-c07e-45f7-8a45-b83de52065cb\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") "
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.098535 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir\") pod \"db681d0f-c07e-45f7-8a45-b83de52065cb\" (UID: \"db681d0f-c07e-45f7-8a45-b83de52065cb\") "
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.098644 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "db681d0f-c07e-45f7-8a45-b83de52065cb" (UID: "db681d0f-c07e-45f7-8a45-b83de52065cb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.098952 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db681d0f-c07e-45f7-8a45-b83de52065cb-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.106222 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "db681d0f-c07e-45f7-8a45-b83de52065cb" (UID: "db681d0f-c07e-45f7-8a45-b83de52065cb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.200175 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db681d0f-c07e-45f7-8a45-b83de52065cb-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.488218 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:18 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:18 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:18 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.488284 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.718230 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 12 14:06:18 crc kubenswrapper[4663]: E1212 14:06:18.718455 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db681d0f-c07e-45f7-8a45-b83de52065cb" containerName="pruner" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.718466 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="db681d0f-c07e-45f7-8a45-b83de52065cb" containerName="pruner" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.718551 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="db681d0f-c07e-45f7-8a45-b83de52065cb" containerName="pruner" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.718946 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.720791 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.721571 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.721895 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.807209 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"db681d0f-c07e-45f7-8a45-b83de52065cb","Type":"ContainerDied","Data":"73722bcb8abc7798141fafbe205f0ced45ccd23182b52691bffa25914f0125ee"} Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.807240 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73722bcb8abc7798141fafbe205f0ced45ccd23182b52691bffa25914f0125ee" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.807258 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.818709 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.818760 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.919596 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.919639 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.919892 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:18 crc kubenswrapper[4663]: I1212 14:06:18.933249 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:19 crc kubenswrapper[4663]: I1212 14:06:19.038836 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:19 crc kubenswrapper[4663]: I1212 14:06:19.270629 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-t4pt8" Dec 12 14:06:19 crc kubenswrapper[4663]: I1212 14:06:19.488277 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:19 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:19 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:19 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:19 crc kubenswrapper[4663]: I1212 14:06:19.488482 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:20 crc kubenswrapper[4663]: I1212 14:06:20.487921 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:20 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:20 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:20 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:20 crc kubenswrapper[4663]: I1212 14:06:20.487994 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:21 crc kubenswrapper[4663]: I1212 14:06:21.488274 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:21 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:21 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:21 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:21 crc kubenswrapper[4663]: I1212 14:06:21.488329 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 12 14:06:22 crc kubenswrapper[4663]: I1212 14:06:22.487700 4663 patch_prober.go:28] interesting pod/router-default-5444994796-ddcb6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 12 14:06:22 crc kubenswrapper[4663]: [-]has-synced failed: reason withheld Dec 12 14:06:22 crc kubenswrapper[4663]: [+]process-running ok Dec 12 14:06:22 crc kubenswrapper[4663]: healthz check failed Dec 12 14:06:22 crc kubenswrapper[4663]: I1212 14:06:22.487941 4663 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-ddcb6" podUID="427e7a1a-b751-4226-9c17-7a8b9226ccc9" containerName="router" probeResult="failure" output="HTTP probe failed 
with statuscode: 500" Dec 12 14:06:23 crc kubenswrapper[4663]: I1212 14:06:23.488759 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:23 crc kubenswrapper[4663]: I1212 14:06:23.490607 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-ddcb6" Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.136681 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 12 14:06:24 crc kubenswrapper[4663]: W1212 14:06:24.143076 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod45a77e02_a260_425d_a07d_8f1cd03200ae.slice/crio-dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071 WatchSource:0}: Error finding container dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071: Status 404 returned error can't find the container with id dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071 Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.840111 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"45a77e02-a260-425d-a07d-8f1cd03200ae","Type":"ContainerStarted","Data":"b154c25c9db4a1c7933503e11c6326b161d41a37a11dd6cbbb8e583df5279e5c"} Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.840165 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"45a77e02-a260-425d-a07d-8f1cd03200ae","Type":"ContainerStarted","Data":"dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071"} Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.843867 4663 generic.go:334] "Generic (PLEG): container finished" podID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerID="a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af" exitCode=0 Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.843903 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8gc87" event={"ID":"0620a18b-0f89-46ff-b112-0b28ee4d7d5a","Type":"ContainerDied","Data":"a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af"} Dec 12 14:06:24 crc kubenswrapper[4663]: I1212 14:06:24.852267 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=6.8522560850000005 podStartE2EDuration="6.852256085s" podCreationTimestamp="2025-12-12 14:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:24.850283122 +0000 UTC m=+164.275701176" watchObservedRunningTime="2025-12-12 14:06:24.852256085 +0000 UTC m=+164.277674139" Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.099994 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod \"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.111387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/91ce7def-b363-4fe9-8d21-b161e64d0f1c-metrics-certs\") pod 
\"network-metrics-daemon-9t6qf\" (UID: \"91ce7def-b363-4fe9-8d21-b161e64d0f1c\") " pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.285313 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9t6qf" Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.429556 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9t6qf"] Dec 12 14:06:25 crc kubenswrapper[4663]: W1212 14:06:25.441214 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91ce7def_b363_4fe9_8d21_b161e64d0f1c.slice/crio-cee14df5583450995ea4e7f030ab1154f56935569c7337db40a2d76a46a055d2 WatchSource:0}: Error finding container cee14df5583450995ea4e7f030ab1154f56935569c7337db40a2d76a46a055d2: Status 404 returned error can't find the container with id cee14df5583450995ea4e7f030ab1154f56935569c7337db40a2d76a46a055d2 Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.849310 4663 generic.go:334] "Generic (PLEG): container finished" podID="45a77e02-a260-425d-a07d-8f1cd03200ae" containerID="b154c25c9db4a1c7933503e11c6326b161d41a37a11dd6cbbb8e583df5279e5c" exitCode=0 Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.849384 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"45a77e02-a260-425d-a07d-8f1cd03200ae","Type":"ContainerDied","Data":"b154c25c9db4a1c7933503e11c6326b161d41a37a11dd6cbbb8e583df5279e5c"} Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.852587 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" event={"ID":"91ce7def-b363-4fe9-8d21-b161e64d0f1c","Type":"ContainerStarted","Data":"ab7d89d05b73e66b8af758bd3499a6d6735fff504943984c10b1d4d107d64275"} Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.852618 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" event={"ID":"91ce7def-b363-4fe9-8d21-b161e64d0f1c","Type":"ContainerStarted","Data":"7c3860308e98a9cc63f2c2b5e1cd1b2bb44c70d0db7fa68403ff5b67c068f007"} Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.852630 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9t6qf" event={"ID":"91ce7def-b363-4fe9-8d21-b161e64d0f1c","Type":"ContainerStarted","Data":"cee14df5583450995ea4e7f030ab1154f56935569c7337db40a2d76a46a055d2"} Dec 12 14:06:25 crc kubenswrapper[4663]: I1212 14:06:25.872944 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-9t6qf" podStartSLOduration=142.872927831 podStartE2EDuration="2m22.872927831s" podCreationTimestamp="2025-12-12 14:04:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:25.86949777 +0000 UTC m=+165.294915825" watchObservedRunningTime="2025-12-12 14:06:25.872927831 +0000 UTC m=+165.298345885" Dec 12 14:06:26 crc kubenswrapper[4663]: I1212 14:06:26.754418 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:26 crc kubenswrapper[4663]: I1212 14:06:26.757236 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:06:26 crc 
Dec 12 14:06:26 crc kubenswrapper[4663]: I1212 14:06:26.879576 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8gc87" podStartSLOduration=3.781678626 podStartE2EDuration="17.879562201s" podCreationTimestamp="2025-12-12 14:06:09 +0000 UTC" firstStartedPulling="2025-12-12 14:06:11.697664152 +0000 UTC m=+151.123082205" lastFinishedPulling="2025-12-12 14:06:25.795547736 +0000 UTC m=+165.220965780" observedRunningTime="2025-12-12 14:06:26.87777728 +0000 UTC m=+166.303195334" watchObservedRunningTime="2025-12-12 14:06:26.879562201 +0000 UTC m=+166.304980256"
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.028635 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.122663 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir\") pod \"45a77e02-a260-425d-a07d-8f1cd03200ae\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") "
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.122759 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "45a77e02-a260-425d-a07d-8f1cd03200ae" (UID: "45a77e02-a260-425d-a07d-8f1cd03200ae"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.122783 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access\") pod \"45a77e02-a260-425d-a07d-8f1cd03200ae\" (UID: \"45a77e02-a260-425d-a07d-8f1cd03200ae\") "
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.122956 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/45a77e02-a260-425d-a07d-8f1cd03200ae-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.130112 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "45a77e02-a260-425d-a07d-8f1cd03200ae" (UID: "45a77e02-a260-425d-a07d-8f1cd03200ae"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.223972 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45a77e02-a260-425d-a07d-8f1cd03200ae-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.863777 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"45a77e02-a260-425d-a07d-8f1cd03200ae","Type":"ContainerDied","Data":"dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071"} Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.863858 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc8af77ef926cd484906e7af37afdca33a27906244a7c54c525326094ffa5071" Dec 12 14:06:27 crc kubenswrapper[4663]: I1212 14:06:27.863800 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 12 14:06:28 crc kubenswrapper[4663]: I1212 14:06:28.996675 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:28 crc kubenswrapper[4663]: I1212 14:06:28.996861 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerName="controller-manager" containerID="cri-o://8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4" gracePeriod=30 Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.010996 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"] Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.011165 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerName="route-controller-manager" containerID="cri-o://57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db" gracePeriod=30 Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.351258 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.354917 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.449958 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert\") pod \"a7b5011e-2f07-4f46-8597-96fb97e199ec\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450016 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca\") pod \"308ebf9e-6dd5-46a3-a344-176905e7915e\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450041 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca\") pod \"a7b5011e-2f07-4f46-8597-96fb97e199ec\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450065 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config\") pod \"a7b5011e-2f07-4f46-8597-96fb97e199ec\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450098 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wx9p\" (UniqueName: \"kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p\") pod \"308ebf9e-6dd5-46a3-a344-176905e7915e\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450117 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert\") pod \"308ebf9e-6dd5-46a3-a344-176905e7915e\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450156 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles\") pod \"308ebf9e-6dd5-46a3-a344-176905e7915e\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450180 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jw4m\" (UniqueName: \"kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m\") pod \"a7b5011e-2f07-4f46-8597-96fb97e199ec\" (UID: \"a7b5011e-2f07-4f46-8597-96fb97e199ec\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.450197 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config\") pod \"308ebf9e-6dd5-46a3-a344-176905e7915e\" (UID: \"308ebf9e-6dd5-46a3-a344-176905e7915e\") " Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.451420 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca" (OuterVolumeSpecName: "client-ca") pod "a7b5011e-2f07-4f46-8597-96fb97e199ec" 
(UID: "a7b5011e-2f07-4f46-8597-96fb97e199ec"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.451798 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config" (OuterVolumeSpecName: "config") pod "308ebf9e-6dd5-46a3-a344-176905e7915e" (UID: "308ebf9e-6dd5-46a3-a344-176905e7915e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.451781 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca" (OuterVolumeSpecName: "client-ca") pod "308ebf9e-6dd5-46a3-a344-176905e7915e" (UID: "308ebf9e-6dd5-46a3-a344-176905e7915e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.451982 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config" (OuterVolumeSpecName: "config") pod "a7b5011e-2f07-4f46-8597-96fb97e199ec" (UID: "a7b5011e-2f07-4f46-8597-96fb97e199ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.452345 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "308ebf9e-6dd5-46a3-a344-176905e7915e" (UID: "308ebf9e-6dd5-46a3-a344-176905e7915e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.455836 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "308ebf9e-6dd5-46a3-a344-176905e7915e" (UID: "308ebf9e-6dd5-46a3-a344-176905e7915e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.455936 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m" (OuterVolumeSpecName: "kube-api-access-6jw4m") pod "a7b5011e-2f07-4f46-8597-96fb97e199ec" (UID: "a7b5011e-2f07-4f46-8597-96fb97e199ec"). InnerVolumeSpecName "kube-api-access-6jw4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.456028 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a7b5011e-2f07-4f46-8597-96fb97e199ec" (UID: "a7b5011e-2f07-4f46-8597-96fb97e199ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.457305 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p" (OuterVolumeSpecName: "kube-api-access-5wx9p") pod "308ebf9e-6dd5-46a3-a344-176905e7915e" (UID: "308ebf9e-6dd5-46a3-a344-176905e7915e"). 
InnerVolumeSpecName "kube-api-access-5wx9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551724 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7b5011e-2f07-4f46-8597-96fb97e199ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551769 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551779 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551788 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7b5011e-2f07-4f46-8597-96fb97e199ec-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551798 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wx9p\" (UniqueName: \"kubernetes.io/projected/308ebf9e-6dd5-46a3-a344-176905e7915e-kube-api-access-5wx9p\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551807 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/308ebf9e-6dd5-46a3-a344-176905e7915e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551818 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551826 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jw4m\" (UniqueName: \"kubernetes.io/projected/a7b5011e-2f07-4f46-8597-96fb97e199ec-kube-api-access-6jw4m\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.551833 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/308ebf9e-6dd5-46a3-a344-176905e7915e-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.875817 4663 generic.go:334] "Generic (PLEG): container finished" podID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerID="57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db" exitCode=0 Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.875869 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.875880 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" event={"ID":"a7b5011e-2f07-4f46-8597-96fb97e199ec","Type":"ContainerDied","Data":"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db"} Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.875899 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76" event={"ID":"a7b5011e-2f07-4f46-8597-96fb97e199ec","Type":"ContainerDied","Data":"8c32fe23273fbda0c50dad60cc32950b34a4909b361f392b2f181d6a69a1dc8b"} Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.875915 4663 scope.go:117] "RemoveContainer" containerID="57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.878553 4663 generic.go:334] "Generic (PLEG): container finished" podID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerID="8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4" exitCode=0 Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.878575 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" event={"ID":"308ebf9e-6dd5-46a3-a344-176905e7915e","Type":"ContainerDied","Data":"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4"} Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.878592 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" event={"ID":"308ebf9e-6dd5-46a3-a344-176905e7915e","Type":"ContainerDied","Data":"58e8cbef8d1cfe1f9456b9865103ac368ec3d532d81ec1b5c93a4cfadcf3dfbb"} Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.878637 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8g7gt" Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.897896 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"] Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.900837 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-zsn76"] Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.908417 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:29 crc kubenswrapper[4663]: I1212 14:06:29.908553 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8g7gt"] Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.284625 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.284666 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.359217 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.855861 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"] Dec 12 14:06:30 crc kubenswrapper[4663]: E1212 14:06:30.859600 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerName="route-controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859625 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerName="route-controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: E1212 14:06:30.859640 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45a77e02-a260-425d-a07d-8f1cd03200ae" containerName="pruner" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859648 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="45a77e02-a260-425d-a07d-8f1cd03200ae" containerName="pruner" Dec 12 14:06:30 crc kubenswrapper[4663]: E1212 14:06:30.859660 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerName="controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859667 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerName="controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859815 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" containerName="route-controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859830 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" containerName="controller-manager" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.859836 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="45a77e02-a260-425d-a07d-8f1cd03200ae" containerName="pruner" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.862465 4663 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"] Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.862686 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.864571 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.864580 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.865853 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.865889 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.865905 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.866070 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.866096 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868289 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"] Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868821 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868851 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868879 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868898 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv7h2\" (UniqueName: \"kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " 
pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868924 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868942 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868959 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.868987 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869019 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fls8\" (UniqueName: \"kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869244 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869410 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869590 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869651 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.869784 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.871258 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.875112 4663 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-global-ca" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.881226 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308ebf9e-6dd5-46a3-a344-176905e7915e" path="/var/lib/kubelet/pods/308ebf9e-6dd5-46a3-a344-176905e7915e/volumes" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.881816 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7b5011e-2f07-4f46-8597-96fb97e199ec" path="/var/lib/kubelet/pods/a7b5011e-2f07-4f46-8597-96fb97e199ec/volumes" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.882309 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"] Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.915235 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.964530 4663 scope.go:117] "RemoveContainer" containerID="57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db" Dec 12 14:06:30 crc kubenswrapper[4663]: E1212 14:06:30.965016 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db\": container with ID starting with 57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db not found: ID does not exist" containerID="57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.965060 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db"} err="failed to get container status \"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db\": rpc error: code = NotFound desc = could not find container \"57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db\": container with ID starting with 57f4be4f1fdc2c1e6fad00f8e86df43fa210ace3af20ef2f5ef54354bdb1e5db not found: ID does not exist" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.965094 4663 scope.go:117] "RemoveContainer" containerID="8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969250 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fls8\" (UniqueName: \"kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969291 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969311 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: 
\"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969331 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969353 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv7h2\" (UniqueName: \"kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969386 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969400 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969417 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.969446 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.970655 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.970731 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.970772 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.970968 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.970983 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.973391 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.974023 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert\") pod \"controller-manager-7cd569bdb6-hzql8\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.982544 4663 scope.go:117] "RemoveContainer" containerID="8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4" Dec 12 14:06:30 crc kubenswrapper[4663]: E1212 14:06:30.982810 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4\": container with ID starting with 8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4 not found: ID does not exist" containerID="8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.982836 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4"} err="failed to get container status \"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4\": rpc error: code = NotFound desc = could not find container \"8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4\": container with ID starting with 8803ce300ee6c2fbf08cacf605e6d50f1f1e54c0a0360ba0f5f24bf5f7349fd4 not found: ID does not exist" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.983830 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv7h2\" (UniqueName: \"kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2\") pod \"controller-manager-7cd569bdb6-hzql8\" 
(UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") " pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:30 crc kubenswrapper[4663]: I1212 14:06:30.985718 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fls8\" (UniqueName: \"kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8\") pod \"route-controller-manager-77c55447db-2fptf\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") " pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:31 crc kubenswrapper[4663]: I1212 14:06:31.185422 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:31 crc kubenswrapper[4663]: I1212 14:06:31.190981 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:31 crc kubenswrapper[4663]: I1212 14:06:31.600649 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" Dec 12 14:06:34 crc kubenswrapper[4663]: I1212 14:06:34.981890 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"] Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.136736 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"] Dec 12 14:06:35 crc kubenswrapper[4663]: W1212 14:06:35.293866 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79f074de_de62_4db5_8313_5b6a285db97f.slice/crio-9da2558b52663283ffdd02a213815d0eb4971f1bc1034dec5ae29b5f04fb0607 WatchSource:0}: Error finding container 9da2558b52663283ffdd02a213815d0eb4971f1bc1034dec5ae29b5f04fb0607: Status 404 returned error can't find the container with id 9da2558b52663283ffdd02a213815d0eb4971f1bc1034dec5ae29b5f04fb0607 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.923589 4663 generic.go:334] "Generic (PLEG): container finished" podID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerID="5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.923662 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerDied","Data":"5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.926074 4663 generic.go:334] "Generic (PLEG): container finished" podID="34cf2033-292d-493b-803a-d0171fae07d3" containerID="28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.926119 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerDied","Data":"28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.927484 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" 
event={"ID":"79f074de-de62-4db5-8313-5b6a285db97f","Type":"ContainerStarted","Data":"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.927509 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" event={"ID":"79f074de-de62-4db5-8313-5b6a285db97f","Type":"ContainerStarted","Data":"9da2558b52663283ffdd02a213815d0eb4971f1bc1034dec5ae29b5f04fb0607"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.927668 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.929177 4663 generic.go:334] "Generic (PLEG): container finished" podID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerID="c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.929251 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerDied","Data":"c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.932171 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.932620 4663 generic.go:334] "Generic (PLEG): container finished" podID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerID="a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.932666 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerDied","Data":"a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.935094 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" event={"ID":"170ec3cf-0fa5-47eb-9357-05f41d67723c","Type":"ContainerStarted","Data":"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.935125 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" event={"ID":"170ec3cf-0fa5-47eb-9357-05f41d67723c","Type":"ContainerStarted","Data":"97e988459cab8577c8430ce56b0ef3b35022d9f269ddca1914a59647ba38741a"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.935795 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.939538 4663 generic.go:334] "Generic (PLEG): container finished" podID="6fc61e95-424c-4039-92c6-2a07590312d6" containerID="69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.940274 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" 
event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerDied","Data":"69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.941513 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.942833 4663 generic.go:334] "Generic (PLEG): container finished" podID="658c01a0-8350-4105-9f87-db732c74faed" containerID="54761cdb7fb4bdeacf39a43a47844c53cedea015a8359475e7056b279cf560a5" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.942892 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerDied","Data":"54761cdb7fb4bdeacf39a43a47844c53cedea015a8359475e7056b279cf560a5"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.945328 4663 generic.go:334] "Generic (PLEG): container finished" podID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerID="859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18" exitCode=0 Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.945352 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerDied","Data":"859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18"} Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.961426 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" podStartSLOduration=6.961412978 podStartE2EDuration="6.961412978s" podCreationTimestamp="2025-12-12 14:06:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:35.959012464 +0000 UTC m=+175.384430518" watchObservedRunningTime="2025-12-12 14:06:35.961412978 +0000 UTC m=+175.386831033" Dec 12 14:06:35 crc kubenswrapper[4663]: I1212 14:06:35.984774 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" podStartSLOduration=6.9847583669999995 podStartE2EDuration="6.984758367s" podCreationTimestamp="2025-12-12 14:06:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:35.982562086 +0000 UTC m=+175.407980141" watchObservedRunningTime="2025-12-12 14:06:35.984758367 +0000 UTC m=+175.410176422" Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.083042 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.128996 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.129043 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.265976 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2rskd" Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.951050 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerStarted","Data":"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.952777 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerStarted","Data":"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.955107 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerStarted","Data":"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.956773 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerStarted","Data":"9acd8f4233cc3a403c408ec1675368ed15ae6a62a8a78e8359dce2c71b6b7122"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.958561 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerStarted","Data":"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.960167 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerStarted","Data":"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"} Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.963091 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ppb9f" podStartSLOduration=1.972800262 podStartE2EDuration="25.963079138s" podCreationTimestamp="2025-12-12 14:06:11 +0000 UTC" firstStartedPulling="2025-12-12 14:06:12.716379546 +0000 UTC m=+152.141797600" lastFinishedPulling="2025-12-12 14:06:36.706658422 +0000 UTC m=+176.132076476" observedRunningTime="2025-12-12 14:06:36.962673529 +0000 UTC m=+176.388091583" watchObservedRunningTime="2025-12-12 14:06:36.963079138 +0000 UTC m=+176.388497193" Dec 12 14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.997428 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-klhsb" podStartSLOduration=2.062936505 podStartE2EDuration="26.997414019s" podCreationTimestamp="2025-12-12 14:06:10 +0000 UTC" firstStartedPulling="2025-12-12 14:06:11.665032649 +0000 UTC m=+151.090450703" lastFinishedPulling="2025-12-12 14:06:36.599510164 +0000 UTC m=+176.024928217" observedRunningTime="2025-12-12 14:06:36.99646337 +0000 UTC m=+176.421881423" watchObservedRunningTime="2025-12-12 14:06:36.997414019 +0000 UTC m=+176.422832073" Dec 12 
14:06:36 crc kubenswrapper[4663]: I1212 14:06:36.997513 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b955r" podStartSLOduration=1.076341528 podStartE2EDuration="24.997508405s" podCreationTimestamp="2025-12-12 14:06:12 +0000 UTC" firstStartedPulling="2025-12-12 14:06:12.703758521 +0000 UTC m=+152.129176575" lastFinishedPulling="2025-12-12 14:06:36.624925398 +0000 UTC m=+176.050343452" observedRunningTime="2025-12-12 14:06:36.980316685 +0000 UTC m=+176.405734739" watchObservedRunningTime="2025-12-12 14:06:36.997508405 +0000 UTC m=+176.422926459" Dec 12 14:06:37 crc kubenswrapper[4663]: I1212 14:06:37.024641 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qz6sl" podStartSLOduration=2.247231521 podStartE2EDuration="27.02462783s" podCreationTimestamp="2025-12-12 14:06:10 +0000 UTC" firstStartedPulling="2025-12-12 14:06:11.654919959 +0000 UTC m=+151.080338004" lastFinishedPulling="2025-12-12 14:06:36.432316259 +0000 UTC m=+175.857734313" observedRunningTime="2025-12-12 14:06:37.01136892 +0000 UTC m=+176.436786974" watchObservedRunningTime="2025-12-12 14:06:37.02462783 +0000 UTC m=+176.450045884" Dec 12 14:06:37 crc kubenswrapper[4663]: I1212 14:06:37.026053 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lsccb" podStartSLOduration=2.3125577379999998 podStartE2EDuration="24.026047697s" podCreationTimestamp="2025-12-12 14:06:13 +0000 UTC" firstStartedPulling="2025-12-12 14:06:14.762737134 +0000 UTC m=+154.188155188" lastFinishedPulling="2025-12-12 14:06:36.476227093 +0000 UTC m=+175.901645147" observedRunningTime="2025-12-12 14:06:37.024908986 +0000 UTC m=+176.450327040" watchObservedRunningTime="2025-12-12 14:06:37.026047697 +0000 UTC m=+176.451465751" Dec 12 14:06:37 crc kubenswrapper[4663]: I1212 14:06:37.045476 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-82xhz" podStartSLOduration=3.263083102 podStartE2EDuration="28.045469893s" podCreationTimestamp="2025-12-12 14:06:09 +0000 UTC" firstStartedPulling="2025-12-12 14:06:11.633559876 +0000 UTC m=+151.058977930" lastFinishedPulling="2025-12-12 14:06:36.415946667 +0000 UTC m=+175.841364721" observedRunningTime="2025-12-12 14:06:37.042518268 +0000 UTC m=+176.467936323" watchObservedRunningTime="2025-12-12 14:06:37.045469893 +0000 UTC m=+176.470887947" Dec 12 14:06:37 crc kubenswrapper[4663]: I1212 14:06:37.967962 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerStarted","Data":"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1"} Dec 12 14:06:37 crc kubenswrapper[4663]: I1212 14:06:37.986961 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zlg8l" podStartSLOduration=3.400813582 podStartE2EDuration="25.986945505s" podCreationTimestamp="2025-12-12 14:06:12 +0000 UTC" firstStartedPulling="2025-12-12 14:06:14.757712438 +0000 UTC m=+154.183130492" lastFinishedPulling="2025-12-12 14:06:37.343844361 +0000 UTC m=+176.769262415" observedRunningTime="2025-12-12 14:06:37.985158229 +0000 UTC m=+177.410576283" watchObservedRunningTime="2025-12-12 14:06:37.986945505 +0000 UTC m=+177.412363558" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.037970 4663 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.038342 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.076622 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.456071 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.456365 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.482115 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.673936 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.674218 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:40 crc kubenswrapper[4663]: I1212 14:06:40.700226 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.014831 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.018522 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.029169 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.029209 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.054926 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.437485 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.437536 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:42 crc kubenswrapper[4663]: I1212 14:06:42.464721 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.017896 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b955r" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.018269 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:06:43 
crc kubenswrapper[4663]: I1212 14:06:43.242988 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.243022 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.268195 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.638038 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.638240 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:43 crc kubenswrapper[4663]: I1212 14:06:43.663616 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.021443 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lsccb" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.021491 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.344849 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.345063 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-klhsb" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="registry-server" containerID="cri-o://6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca" gracePeriod=2 Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.692949 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.732066 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fsc4\" (UniqueName: \"kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4\") pod \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.732132 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content\") pod \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.732169 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities\") pod \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\" (UID: \"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a\") " Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.732874 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities" (OuterVolumeSpecName: "utilities") pod "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" (UID: "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.735909 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4" (OuterVolumeSpecName: "kube-api-access-9fsc4") pod "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" (UID: "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a"). InnerVolumeSpecName "kube-api-access-9fsc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.771056 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" (UID: "d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.832915 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.832940 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.832949 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fsc4\" (UniqueName: \"kubernetes.io/projected/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a-kube-api-access-9fsc4\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:44 crc kubenswrapper[4663]: I1212 14:06:44.938624 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"] Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.005160 4663 generic.go:334] "Generic (PLEG): container finished" podID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerID="6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca" exitCode=0 Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.005228 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerDied","Data":"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca"} Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.005274 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-klhsb" event={"ID":"d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a","Type":"ContainerDied","Data":"45d46f4f2050565e7f43f933260ebadcce90d86945a6917bf014666620c65b3c"} Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.005292 4663 scope.go:117] "RemoveContainer" containerID="6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.005247 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-klhsb" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.019672 4663 scope.go:117] "RemoveContainer" containerID="a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.025659 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.028464 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-klhsb"] Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.035577 4663 scope.go:117] "RemoveContainer" containerID="652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.048948 4663 scope.go:117] "RemoveContainer" containerID="6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca" Dec 12 14:06:45 crc kubenswrapper[4663]: E1212 14:06:45.049339 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca\": container with ID starting with 6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca not found: ID does not exist" containerID="6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.049392 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca"} err="failed to get container status \"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca\": rpc error: code = NotFound desc = could not find container \"6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca\": container with ID starting with 6bb8fcd94acc6eef49b982165c9d2008452d902c70f3df55d4afa36700fec1ca not found: ID does not exist" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.049828 4663 scope.go:117] "RemoveContainer" containerID="a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32" Dec 12 14:06:45 crc kubenswrapper[4663]: E1212 14:06:45.050131 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32\": container with ID starting with a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32 not found: ID does not exist" containerID="a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.050164 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32"} err="failed to get container status \"a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32\": rpc error: code = NotFound desc = could not find container \"a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32\": container with ID starting with a7dc98277a57200629c499780a8ecb128360f1b23f4b5b17a4b1b5476a9d4f32 not found: ID does not exist" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.050189 4663 scope.go:117] "RemoveContainer" containerID="652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5" Dec 12 14:06:45 crc kubenswrapper[4663]: E1212 14:06:45.050442 4663 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5\": container with ID starting with 652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5 not found: ID does not exist" containerID="652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5" Dec 12 14:06:45 crc kubenswrapper[4663]: I1212 14:06:45.050467 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5"} err="failed to get container status \"652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5\": rpc error: code = NotFound desc = could not find container \"652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5\": container with ID starting with 652afb629d6c569f640382ad273ec416a94f92fc027cad442943ad4bc06d41c5 not found: ID does not exist" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.145338 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qz6sl"] Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.146420 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qz6sl" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="registry-server" containerID="cri-o://57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2" gracePeriod=2 Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.546415 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qz6sl" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.656248 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities\") pod \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.656344 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8txm\" (UniqueName: \"kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm\") pod \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.656366 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content\") pod \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\" (UID: \"47373cbe-7df3-4621-adaf-cf8b6ec73f61\") " Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.656858 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities" (OuterVolumeSpecName: "utilities") pod "47373cbe-7df3-4621-adaf-cf8b6ec73f61" (UID: "47373cbe-7df3-4621-adaf-cf8b6ec73f61"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.660304 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm" (OuterVolumeSpecName: "kube-api-access-w8txm") pod "47373cbe-7df3-4621-adaf-cf8b6ec73f61" (UID: "47373cbe-7df3-4621-adaf-cf8b6ec73f61"). 
InnerVolumeSpecName "kube-api-access-w8txm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.691825 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47373cbe-7df3-4621-adaf-cf8b6ec73f61" (UID: "47373cbe-7df3-4621-adaf-cf8b6ec73f61"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.743637 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"] Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.743838 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b955r" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="registry-server" containerID="cri-o://9acd8f4233cc3a403c408ec1675368ed15ae6a62a8a78e8359dce2c71b6b7122" gracePeriod=2 Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.757514 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.757533 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8txm\" (UniqueName: \"kubernetes.io/projected/47373cbe-7df3-4621-adaf-cf8b6ec73f61-kube-api-access-w8txm\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.757543 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47373cbe-7df3-4621-adaf-cf8b6ec73f61-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:46 crc kubenswrapper[4663]: I1212 14:06:46.875188 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" path="/var/lib/kubelet/pods/d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a/volumes" Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.020165 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qz6sl"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.020188 4663 generic.go:334] "Generic (PLEG): container finished" podID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerID="57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2" exitCode=0
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.020215 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerDied","Data":"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"}
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.020261 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qz6sl" event={"ID":"47373cbe-7df3-4621-adaf-cf8b6ec73f61","Type":"ContainerDied","Data":"30881c4e9a9b4f3b59b46c29edcea1a21ef5bdac00b72a510a05e6d15d961fc0"}
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.020281 4663 scope.go:117] "RemoveContainer" containerID="57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.030381 4663 generic.go:334] "Generic (PLEG): container finished" podID="658c01a0-8350-4105-9f87-db732c74faed" containerID="9acd8f4233cc3a403c408ec1675368ed15ae6a62a8a78e8359dce2c71b6b7122" exitCode=0
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.030432 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerDied","Data":"9acd8f4233cc3a403c408ec1675368ed15ae6a62a8a78e8359dce2c71b6b7122"}
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.032600 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qz6sl"]
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.034019 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qz6sl"]
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.034204 4663 scope.go:117] "RemoveContainer" containerID="c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.044420 4663 scope.go:117] "RemoveContainer" containerID="a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.056675 4663 scope.go:117] "RemoveContainer" containerID="57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"
Dec 12 14:06:47 crc kubenswrapper[4663]: E1212 14:06:47.057023 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2\": container with ID starting with 57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2 not found: ID does not exist" containerID="57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.057048 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2"} err="failed to get container status \"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2\": rpc error: code = NotFound desc = could not find container \"57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2\": container with ID starting with 57c6287a5a5b8e8b506c7f83681c06c0c92242ea0a513a8cb4537363ebb28af2 not found: ID does not exist"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.057081 4663 scope.go:117] "RemoveContainer" containerID="c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735"
Dec 12 14:06:47 crc kubenswrapper[4663]: E1212 14:06:47.057357 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735\": container with ID starting with c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735 not found: ID does not exist" containerID="c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.057407 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735"} err="failed to get container status \"c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735\": rpc error: code = NotFound desc = could not find container \"c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735\": container with ID starting with c4c0b65f661c3b228cbd67da6bbc31900ca0cad16b7b8a93eb13e4066b3c7735 not found: ID does not exist"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.057434 4663 scope.go:117] "RemoveContainer" containerID="a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521"
Dec 12 14:06:47 crc kubenswrapper[4663]: E1212 14:06:47.057807 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521\": container with ID starting with a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521 not found: ID does not exist" containerID="a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.057836 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521"} err="failed to get container status \"a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521\": rpc error: code = NotFound desc = could not find container \"a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521\": container with ID starting with a11023282b7066a1586b2c94b4e9ef4727fc6bcea2fdaf301b9ea3d613b86521 not found: ID does not exist"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.081784 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b955r"
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.160858 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities\") pod \"658c01a0-8350-4105-9f87-db732c74faed\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") "
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.160980 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmbtj\" (UniqueName: \"kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj\") pod \"658c01a0-8350-4105-9f87-db732c74faed\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") "
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.161031 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content\") pod \"658c01a0-8350-4105-9f87-db732c74faed\" (UID: \"658c01a0-8350-4105-9f87-db732c74faed\") "
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.161494 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities" (OuterVolumeSpecName: "utilities") pod "658c01a0-8350-4105-9f87-db732c74faed" (UID: "658c01a0-8350-4105-9f87-db732c74faed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.163448 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj" (OuterVolumeSpecName: "kube-api-access-tmbtj") pod "658c01a0-8350-4105-9f87-db732c74faed" (UID: "658c01a0-8350-4105-9f87-db732c74faed"). InnerVolumeSpecName "kube-api-access-tmbtj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.178277 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "658c01a0-8350-4105-9f87-db732c74faed" (UID: "658c01a0-8350-4105-9f87-db732c74faed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.262347 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.262376 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/658c01a0-8350-4105-9f87-db732c74faed-utilities\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:47 crc kubenswrapper[4663]: I1212 14:06:47.262387 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmbtj\" (UniqueName: \"kubernetes.io/projected/658c01a0-8350-4105-9f87-db732c74faed-kube-api-access-tmbtj\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.037257 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b955r" event={"ID":"658c01a0-8350-4105-9f87-db732c74faed","Type":"ContainerDied","Data":"dac489de5820e120d91bafc3019079f980ae1aceb42d00157321220af4077e91"}
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.037684 4663 scope.go:117] "RemoveContainer" containerID="9acd8f4233cc3a403c408ec1675368ed15ae6a62a8a78e8359dce2c71b6b7122"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.037522 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b955r"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.051948 4663 scope.go:117] "RemoveContainer" containerID="54761cdb7fb4bdeacf39a43a47844c53cedea015a8359475e7056b279cf560a5"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.059817 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"]
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.062271 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b955r"]
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.065322 4663 scope.go:117] "RemoveContainer" containerID="d84a2c7993fb748891e91404ababb5e12d26ac1de1c80815e9acb9cbc7630bbb"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.543814 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"]
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.543983 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lsccb" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="registry-server" containerID="cri-o://1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede" gracePeriod=2
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.874960 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" path="/var/lib/kubelet/pods/47373cbe-7df3-4621-adaf-cf8b6ec73f61/volumes"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.875658 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="658c01a0-8350-4105-9f87-db732c74faed" path="/var/lib/kubelet/pods/658c01a0-8350-4105-9f87-db732c74faed/volumes"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.901296 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lsccb"
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.981422 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content\") pod \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") "
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.981458 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities\") pod \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") "
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.981509 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-689cd\" (UniqueName: \"kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd\") pod \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\" (UID: \"fe66ebb4-36ec-4c3d-8658-3b014a710cfa\") "
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.981650 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"]
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.981837 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" podUID="79f074de-de62-4db5-8313-5b6a285db97f" containerName="controller-manager" containerID="cri-o://7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b" gracePeriod=30
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.982082 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities" (OuterVolumeSpecName: "utilities") pod "fe66ebb4-36ec-4c3d-8658-3b014a710cfa" (UID: "fe66ebb4-36ec-4c3d-8658-3b014a710cfa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 12 14:06:48 crc kubenswrapper[4663]: I1212 14:06:48.986139 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd" (OuterVolumeSpecName: "kube-api-access-689cd") pod "fe66ebb4-36ec-4c3d-8658-3b014a710cfa" (UID: "fe66ebb4-36ec-4c3d-8658-3b014a710cfa"). InnerVolumeSpecName "kube-api-access-689cd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.047079 4663 generic.go:334] "Generic (PLEG): container finished" podID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerID="1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede" exitCode=0
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.047250 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerDied","Data":"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"}
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.047327 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lsccb" event={"ID":"fe66ebb4-36ec-4c3d-8658-3b014a710cfa","Type":"ContainerDied","Data":"355d38a0366a99f842d4ee4bd0086b2d54631303660f837a9b1853ce43f7243b"}
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.047405 4663 scope.go:117] "RemoveContainer" containerID="1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.047550 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lsccb"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.059777 4663 scope.go:117] "RemoveContainer" containerID="859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.064571 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe66ebb4-36ec-4c3d-8658-3b014a710cfa" (UID: "fe66ebb4-36ec-4c3d-8658-3b014a710cfa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.076467 4663 scope.go:117] "RemoveContainer" containerID="ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.082341 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.082431 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-utilities\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.082490 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-689cd\" (UniqueName: \"kubernetes.io/projected/fe66ebb4-36ec-4c3d-8658-3b014a710cfa-kube-api-access-689cd\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.085945 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"]
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.086193 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" podUID="170ec3cf-0fa5-47eb-9357-05f41d67723c" containerName="route-controller-manager" containerID="cri-o://75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e" gracePeriod=30
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.090737 4663 scope.go:117] "RemoveContainer" containerID="1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"
Dec 12 14:06:49 crc kubenswrapper[4663]: E1212 14:06:49.091113 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede\": container with ID starting with 1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede not found: ID does not exist" containerID="1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.091231 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede"} err="failed to get container status \"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede\": rpc error: code = NotFound desc = could not find container \"1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede\": container with ID starting with 1a0e96032ebc6c7c243a3c9b2750e036ce9a84aa9d2fdaab35dadad47fbceede not found: ID does not exist"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.091323 4663 scope.go:117] "RemoveContainer" containerID="859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18"
Dec 12 14:06:49 crc kubenswrapper[4663]: E1212 14:06:49.091591 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18\": container with ID starting with 859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18 not found: ID does not exist" containerID="859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.091680 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18"} err="failed to get container status \"859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18\": rpc error: code = NotFound desc = could not find container \"859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18\": container with ID starting with 859934fe05019c6f1703d62df37af56b3fd275b2dda4eeb47fa75e2f8e0f1f18 not found: ID does not exist"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.091764 4663 scope.go:117] "RemoveContainer" containerID="ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea"
Dec 12 14:06:49 crc kubenswrapper[4663]: E1212 14:06:49.092316 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea\": container with ID starting with ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea not found: ID does not exist" containerID="ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.092362 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea"} err="failed to get container status \"ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea\": rpc error: code = NotFound desc = could not find container \"ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea\": container with ID starting with ee6ec0d1023ca7a1f4c6d61d5efa66a074dd8ec61845c9091a87f5484b155fea not found: ID does not exist"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.368494 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"]
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.375619 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lsccb"]
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.403125 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.425814 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486018 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config\") pod \"79f074de-de62-4db5-8313-5b6a285db97f\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486116 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config\") pod \"170ec3cf-0fa5-47eb-9357-05f41d67723c\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486145 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert\") pod \"170ec3cf-0fa5-47eb-9357-05f41d67723c\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486164 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca\") pod \"170ec3cf-0fa5-47eb-9357-05f41d67723c\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486185 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv7h2\" (UniqueName: \"kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2\") pod \"79f074de-de62-4db5-8313-5b6a285db97f\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486779 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config" (OuterVolumeSpecName: "config") pod "170ec3cf-0fa5-47eb-9357-05f41d67723c" (UID: "170ec3cf-0fa5-47eb-9357-05f41d67723c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486791 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert\") pod \"79f074de-de62-4db5-8313-5b6a285db97f\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486841 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fls8\" (UniqueName: \"kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8\") pod \"170ec3cf-0fa5-47eb-9357-05f41d67723c\" (UID: \"170ec3cf-0fa5-47eb-9357-05f41d67723c\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486862 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca\") pod \"79f074de-de62-4db5-8313-5b6a285db97f\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486894 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles\") pod \"79f074de-de62-4db5-8313-5b6a285db97f\" (UID: \"79f074de-de62-4db5-8313-5b6a285db97f\") "
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486885 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config" (OuterVolumeSpecName: "config") pod "79f074de-de62-4db5-8313-5b6a285db97f" (UID: "79f074de-de62-4db5-8313-5b6a285db97f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.486993 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca" (OuterVolumeSpecName: "client-ca") pod "170ec3cf-0fa5-47eb-9357-05f41d67723c" (UID: "170ec3cf-0fa5-47eb-9357-05f41d67723c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.487294 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca" (OuterVolumeSpecName: "client-ca") pod "79f074de-de62-4db5-8313-5b6a285db97f" (UID: "79f074de-de62-4db5-8313-5b6a285db97f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.487317 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-config\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.487376 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-config\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.487362 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "79f074de-de62-4db5-8313-5b6a285db97f" (UID: "79f074de-de62-4db5-8313-5b6a285db97f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.487389 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/170ec3cf-0fa5-47eb-9357-05f41d67723c-client-ca\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.488901 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "170ec3cf-0fa5-47eb-9357-05f41d67723c" (UID: "170ec3cf-0fa5-47eb-9357-05f41d67723c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.488911 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8" (OuterVolumeSpecName: "kube-api-access-8fls8") pod "170ec3cf-0fa5-47eb-9357-05f41d67723c" (UID: "170ec3cf-0fa5-47eb-9357-05f41d67723c"). InnerVolumeSpecName "kube-api-access-8fls8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.488959 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "79f074de-de62-4db5-8313-5b6a285db97f" (UID: "79f074de-de62-4db5-8313-5b6a285db97f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.489394 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2" (OuterVolumeSpecName: "kube-api-access-fv7h2") pod "79f074de-de62-4db5-8313-5b6a285db97f" (UID: "79f074de-de62-4db5-8313-5b6a285db97f"). InnerVolumeSpecName "kube-api-access-fv7h2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588161 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588189 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/170ec3cf-0fa5-47eb-9357-05f41d67723c-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588198 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv7h2\" (UniqueName: \"kubernetes.io/projected/79f074de-de62-4db5-8313-5b6a285db97f-kube-api-access-fv7h2\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588208 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79f074de-de62-4db5-8313-5b6a285db97f-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588216 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fls8\" (UniqueName: \"kubernetes.io/projected/170ec3cf-0fa5-47eb-9357-05f41d67723c-kube-api-access-8fls8\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:49 crc kubenswrapper[4663]: I1212 14:06:49.588224 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/79f074de-de62-4db5-8313-5b6a285db97f-client-ca\") on node \"crc\" DevicePath \"\""
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.052105 4663 generic.go:334] "Generic (PLEG): container finished" podID="170ec3cf-0fa5-47eb-9357-05f41d67723c" containerID="75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e" exitCode=0
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.052153 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.052150 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" event={"ID":"170ec3cf-0fa5-47eb-9357-05f41d67723c","Type":"ContainerDied","Data":"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"}
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.052195 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf" event={"ID":"170ec3cf-0fa5-47eb-9357-05f41d67723c","Type":"ContainerDied","Data":"97e988459cab8577c8430ce56b0ef3b35022d9f269ddca1914a59647ba38741a"}
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.052213 4663 scope.go:117] "RemoveContainer" containerID="75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.060479 4663 generic.go:334] "Generic (PLEG): container finished" podID="79f074de-de62-4db5-8313-5b6a285db97f" containerID="7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b" exitCode=0
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.060504 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.060549 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" event={"ID":"79f074de-de62-4db5-8313-5b6a285db97f","Type":"ContainerDied","Data":"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"}
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.060572 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd569bdb6-hzql8" event={"ID":"79f074de-de62-4db5-8313-5b6a285db97f","Type":"ContainerDied","Data":"9da2558b52663283ffdd02a213815d0eb4971f1bc1034dec5ae29b5f04fb0607"}
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.071215 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-82xhz"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.071843 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.073889 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77c55447db-2fptf"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.077653 4663 scope.go:117] "RemoveContainer" containerID="75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.078220 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e\": container with ID starting with 75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e not found: ID does not exist" containerID="75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.078250 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e"} err="failed to get container status \"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e\": rpc error: code = NotFound desc = could not find container \"75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e\": container with ID starting with 75b8496580e46790637831a675acafe8b07b5b8145a084aa132f3486f03ce55e not found: ID does not exist"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.078272 4663 scope.go:117] "RemoveContainer" containerID="7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.089612 4663 scope.go:117] "RemoveContainer" containerID="7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.090625 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b\": container with ID starting with 7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b not found: ID does not exist" containerID="7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.090659 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b"} err="failed to get container status \"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b\": rpc error: code = NotFound desc = could not find container \"7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b\": container with ID starting with 7693d047c1de8f4a536e3c11be0e173013fe3f207af97885b87e4c7afddc1c3b not found: ID does not exist"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.099353 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.101992 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cd569bdb6-hzql8"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866268 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"]
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866629 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866643 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866653 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866659 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866669 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866674 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866685 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866690 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866698 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866704 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866710 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866715 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866722 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170ec3cf-0fa5-47eb-9357-05f41d67723c" containerName="route-controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866727 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="170ec3cf-0fa5-47eb-9357-05f41d67723c" containerName="route-controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866733 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79f074de-de62-4db5-8313-5b6a285db97f" containerName="controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866738 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="79f074de-de62-4db5-8313-5b6a285db97f" containerName="controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866759 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866765 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866773 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866778 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866787 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866794 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866801 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866806 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866811 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866816 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="extract-content"
Dec 12 14:06:50 crc kubenswrapper[4663]: E1212 14:06:50.866821 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866827 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="extract-utilities"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866912 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="79f074de-de62-4db5-8313-5b6a285db97f" containerName="controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866920 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="658c01a0-8350-4105-9f87-db732c74faed" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866926 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="47373cbe-7df3-4621-adaf-cf8b6ec73f61" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866935 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866944 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d11b55a2-e4a0-47e6-a9a9-5b935aabfe4a" containerName="registry-server"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.866950 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="170ec3cf-0fa5-47eb-9357-05f41d67723c" containerName="route-controller-manager"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.867309 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.868314 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.868905 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.870143 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.870302 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.870945 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.870981 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.871100 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.871360 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.871385 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.873561 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.873715 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.873922 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.873929 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.874755 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="170ec3cf-0fa5-47eb-9357-05f41d67723c" path="/var/lib/kubelet/pods/170ec3cf-0fa5-47eb-9357-05f41d67723c/volumes"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.875042 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.875232 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79f074de-de62-4db5-8313-5b6a285db97f" path="/var/lib/kubelet/pods/79f074de-de62-4db5-8313-5b6a285db97f/volumes"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.875628 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe66ebb4-36ec-4c3d-8658-3b014a710cfa" path="/var/lib/kubelet/pods/fe66ebb4-36ec-4c3d-8658-3b014a710cfa/volumes"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.878385 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.881762 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.884012 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"]
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.900930 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.900963 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.900985 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901003 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp9r2\" (UniqueName: \"kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901128 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901161 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901230 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k5fj\" (UniqueName: \"kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901313 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:50 crc kubenswrapper[4663]: I1212 14:06:50.901362 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002257 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002297 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002335 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k5fj\" (UniqueName: \"kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002370 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002401 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002428 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002440 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002459 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.002473 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp9r2\" (UniqueName: \"kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.003578 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.003772 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.003992 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.004014 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.004502 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.008456 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.008461 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.017739 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k5fj\" (UniqueName: \"kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj\") pod \"controller-manager-6b5cb4ccdf-8kp7w\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.017857 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp9r2\" (UniqueName: \"kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2\") pod \"route-controller-manager-7d8cd554c5-rfbrs\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.181235 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.186586 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.543003 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"]
Dec 12 14:06:51 crc kubenswrapper[4663]: W1212 14:06:51.548811 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9af277c_033a_4ab8_8473_6c5c2725fe8c.slice/crio-7b3e5d12f02e873602128a8785877734931bce143131551a6aacac35c4c998b3 WatchSource:0}: Error finding container 7b3e5d12f02e873602128a8785877734931bce143131551a6aacac35c4c998b3: Status 404 returned error can't find the container with id 7b3e5d12f02e873602128a8785877734931bce143131551a6aacac35c4c998b3
Dec 12 14:06:51 crc kubenswrapper[4663]: I1212 14:06:51.587151 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"]
Dec 12 14:06:51 crc kubenswrapper[4663]: W1212 14:06:51.587641 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod716e1b3f_7c40_4d82_a8af_fd8d88b2391e.slice/crio-a53a4e1e9e5a7c8e6dce17ca059dc5bff6109bd9af4217f8e4a4025f55824599 WatchSource:0}: Error finding container a53a4e1e9e5a7c8e6dce17ca059dc5bff6109bd9af4217f8e4a4025f55824599: Status 404 returned error can't find the container with id a53a4e1e9e5a7c8e6dce17ca059dc5bff6109bd9af4217f8e4a4025f55824599
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.073405 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" event={"ID":"d9af277c-033a-4ab8-8473-6c5c2725fe8c","Type":"ContainerStarted","Data":"ef4ae975419721136ec6875f05a78bf27f016f776019fa87872b31f38fdcbf70"}
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.073606 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" event={"ID":"d9af277c-033a-4ab8-8473-6c5c2725fe8c","Type":"ContainerStarted","Data":"7b3e5d12f02e873602128a8785877734931bce143131551a6aacac35c4c998b3"}
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.073824 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.078292 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" event={"ID":"716e1b3f-7c40-4d82-a8af-fd8d88b2391e","Type":"ContainerStarted","Data":"26e023d890271a8587cf4e96569b2dd95725062f887d802d5b1ff6f402604ba5"}
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.078346 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" event={"ID":"716e1b3f-7c40-4d82-a8af-fd8d88b2391e","Type":"ContainerStarted","Data":"a53a4e1e9e5a7c8e6dce17ca059dc5bff6109bd9af4217f8e4a4025f55824599"}
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.078684 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.078986 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.088632 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" podStartSLOduration=4.088617742 podStartE2EDuration="4.088617742s" podCreationTimestamp="2025-12-12 14:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:52.087042713 +0000 UTC m=+191.512460767" watchObservedRunningTime="2025-12-12 14:06:52.088617742 +0000 UTC m=+191.514035795"
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.116998 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" podStartSLOduration=3.116983007 podStartE2EDuration="3.116983007s" podCreationTimestamp="2025-12-12 14:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:52.099049919 +0000 UTC m=+191.524467973" watchObservedRunningTime="2025-12-12 14:06:52.116983007 +0000 UTC m=+191.542401062"
Dec 12 14:06:52 crc kubenswrapper[4663]: I1212 14:06:52.118909 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.114869 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.115583 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.118663 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.119308 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.119503 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.138424 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.138487 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.239892 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.239978 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.240049 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.255612 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.429608 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 12 14:06:54 crc kubenswrapper[4663]: I1212 14:06:54.779757 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 12 14:06:55 crc kubenswrapper[4663]: I1212 14:06:55.095598 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c0c94851-4249-456c-b974-4cb59e2f51d0","Type":"ContainerStarted","Data":"64a72efdda2d01be04f60a69f81fc8b2a235c04a54e33063d4aec4924f01781c"}
Dec 12 14:06:55 crc kubenswrapper[4663]: I1212 14:06:55.095816 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c0c94851-4249-456c-b974-4cb59e2f51d0","Type":"ContainerStarted","Data":"8f75f6c87dee5433cd8242a966d40400228465cd8815ddc67bb1fa81eb07dd65"}
Dec 12 14:06:55 crc kubenswrapper[4663]: I1212 14:06:55.111459 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.111448962 podStartE2EDuration="1.111448962s" podCreationTimestamp="2025-12-12 14:06:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:06:55.108821945 +0000 UTC m=+194.534239999" watchObservedRunningTime="2025-12-12 14:06:55.111448962 +0000 UTC m=+194.536867016"
Dec 12 14:06:56 crc kubenswrapper[4663]: I1212 14:06:56.101160 4663 generic.go:334] "Generic (PLEG): container finished" podID="c0c94851-4249-456c-b974-4cb59e2f51d0" containerID="64a72efdda2d01be04f60a69f81fc8b2a235c04a54e33063d4aec4924f01781c" exitCode=0
Dec 12 14:06:56 crc kubenswrapper[4663]: I1212 14:06:56.101739 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c0c94851-4249-456c-b974-4cb59e2f51d0","Type":"ContainerDied","Data":"64a72efdda2d01be04f60a69f81fc8b2a235c04a54e33063d4aec4924f01781c"}
Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.311465 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.372479 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir\") pod \"c0c94851-4249-456c-b974-4cb59e2f51d0\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.372690 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c0c94851-4249-456c-b974-4cb59e2f51d0" (UID: "c0c94851-4249-456c-b974-4cb59e2f51d0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.372820 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access\") pod \"c0c94851-4249-456c-b974-4cb59e2f51d0\" (UID: \"c0c94851-4249-456c-b974-4cb59e2f51d0\") " Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.373142 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c0c94851-4249-456c-b974-4cb59e2f51d0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.379198 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c0c94851-4249-456c-b974-4cb59e2f51d0" (UID: "c0c94851-4249-456c-b974-4cb59e2f51d0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:06:57 crc kubenswrapper[4663]: I1212 14:06:57.474214 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c0c94851-4249-456c-b974-4cb59e2f51d0-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 12 14:06:58 crc kubenswrapper[4663]: I1212 14:06:58.110604 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c0c94851-4249-456c-b974-4cb59e2f51d0","Type":"ContainerDied","Data":"8f75f6c87dee5433cd8242a966d40400228465cd8815ddc67bb1fa81eb07dd65"} Dec 12 14:06:58 crc kubenswrapper[4663]: I1212 14:06:58.110763 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f75f6c87dee5433cd8242a966d40400228465cd8815ddc67bb1fa81eb07dd65" Dec 12 14:06:58 crc kubenswrapper[4663]: I1212 14:06:58.110637 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.711066 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 12 14:07:01 crc kubenswrapper[4663]: E1212 14:07:01.711427 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c94851-4249-456c-b974-4cb59e2f51d0" containerName="pruner" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.711437 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c94851-4249-456c-b974-4cb59e2f51d0" containerName="pruner" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.711519 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c94851-4249-456c-b974-4cb59e2f51d0" containerName="pruner" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.711830 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.713303 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.714513 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.719565 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.816180 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.816249 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.816362 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.916929 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.916987 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.917024 4663 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.917029 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.917043 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:01 crc kubenswrapper[4663]: I1212 14:07:01.931099 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access\") pod \"installer-9-crc\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:02 crc kubenswrapper[4663]: I1212 14:07:02.023848 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 12 14:07:02 crc kubenswrapper[4663]: I1212 14:07:02.354394 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 12 14:07:03 crc kubenswrapper[4663]: I1212 14:07:03.131529 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c7619fd-25f9-4050-8b13-3496ed370665","Type":"ContainerStarted","Data":"5b4addbe81d4764176e84d17d9596c900564d36e4b808f383c6260767d373c84"} Dec 12 14:07:03 crc kubenswrapper[4663]: I1212 14:07:03.131867 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c7619fd-25f9-4050-8b13-3496ed370665","Type":"ContainerStarted","Data":"cf215106bddd754b2670354bb7ad5c36983b14ceeada8969de33a8e5c50d2030"} Dec 12 14:07:03 crc kubenswrapper[4663]: I1212 14:07:03.140958 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.140946097 podStartE2EDuration="2.140946097s" podCreationTimestamp="2025-12-12 14:07:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:03.140721997 +0000 UTC m=+202.566140051" watchObservedRunningTime="2025-12-12 14:07:03.140946097 +0000 UTC m=+202.566364142" Dec 12 14:07:06 crc kubenswrapper[4663]: I1212 14:07:06.128290 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:07:06 crc kubenswrapper[4663]: I1212 14:07:06.128511 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:07:06 crc kubenswrapper[4663]: I1212 14:07:06.128549 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:07:06 crc kubenswrapper[4663]: I1212 14:07:06.129036 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:07:06 crc kubenswrapper[4663]: I1212 14:07:06.129085 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5" gracePeriod=600 Dec 12 14:07:07 crc kubenswrapper[4663]: I1212 14:07:07.147603 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5" exitCode=0 Dec 12 14:07:07 crc kubenswrapper[4663]: I1212 14:07:07.147679 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5"} Dec 12 14:07:07 crc kubenswrapper[4663]: I1212 14:07:07.147882 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae"} Dec 12 14:07:08 crc kubenswrapper[4663]: I1212 14:07:08.979834 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"] Dec 12 14:07:08 crc kubenswrapper[4663]: I1212 14:07:08.980155 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" podUID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" containerName="controller-manager" containerID="cri-o://ef4ae975419721136ec6875f05a78bf27f016f776019fa87872b31f38fdcbf70" gracePeriod=30 Dec 12 14:07:08 crc kubenswrapper[4663]: I1212 14:07:08.991519 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"] Dec 12 14:07:08 crc kubenswrapper[4663]: I1212 14:07:08.991684 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" podUID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" containerName="route-controller-manager" containerID="cri-o://26e023d890271a8587cf4e96569b2dd95725062f887d802d5b1ff6f402604ba5" gracePeriod=30 Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.156292 4663 generic.go:334] "Generic (PLEG): container finished" podID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" 
containerID="ef4ae975419721136ec6875f05a78bf27f016f776019fa87872b31f38fdcbf70" exitCode=0 Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.156496 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" event={"ID":"d9af277c-033a-4ab8-8473-6c5c2725fe8c","Type":"ContainerDied","Data":"ef4ae975419721136ec6875f05a78bf27f016f776019fa87872b31f38fdcbf70"} Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.158861 4663 generic.go:334] "Generic (PLEG): container finished" podID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" containerID="26e023d890271a8587cf4e96569b2dd95725062f887d802d5b1ff6f402604ba5" exitCode=0 Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.158906 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" event={"ID":"716e1b3f-7c40-4d82-a8af-fd8d88b2391e","Type":"ContainerDied","Data":"26e023d890271a8587cf4e96569b2dd95725062f887d802d5b1ff6f402604ba5"} Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.397358 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.479391 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494436 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k5fj\" (UniqueName: \"kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj\") pod \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494487 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles\") pod \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494514 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config\") pod \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494543 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert\") pod \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494556 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca\") pod \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494594 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert\") pod \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\" (UID: 
\"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494619 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca\") pod \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494634 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config\") pod \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\" (UID: \"d9af277c-033a-4ab8-8473-6c5c2725fe8c\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.494669 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp9r2\" (UniqueName: \"kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2\") pod \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\" (UID: \"716e1b3f-7c40-4d82-a8af-fd8d88b2391e\") " Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.495445 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d9af277c-033a-4ab8-8473-6c5c2725fe8c" (UID: "d9af277c-033a-4ab8-8473-6c5c2725fe8c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.495705 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config" (OuterVolumeSpecName: "config") pod "716e1b3f-7c40-4d82-a8af-fd8d88b2391e" (UID: "716e1b3f-7c40-4d82-a8af-fd8d88b2391e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.495997 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca" (OuterVolumeSpecName: "client-ca") pod "d9af277c-033a-4ab8-8473-6c5c2725fe8c" (UID: "d9af277c-033a-4ab8-8473-6c5c2725fe8c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.496213 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca" (OuterVolumeSpecName: "client-ca") pod "716e1b3f-7c40-4d82-a8af-fd8d88b2391e" (UID: "716e1b3f-7c40-4d82-a8af-fd8d88b2391e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.496534 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config" (OuterVolumeSpecName: "config") pod "d9af277c-033a-4ab8-8473-6c5c2725fe8c" (UID: "d9af277c-033a-4ab8-8473-6c5c2725fe8c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.505261 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "716e1b3f-7c40-4d82-a8af-fd8d88b2391e" (UID: "716e1b3f-7c40-4d82-a8af-fd8d88b2391e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.505326 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2" (OuterVolumeSpecName: "kube-api-access-zp9r2") pod "716e1b3f-7c40-4d82-a8af-fd8d88b2391e" (UID: "716e1b3f-7c40-4d82-a8af-fd8d88b2391e"). InnerVolumeSpecName "kube-api-access-zp9r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.505357 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj" (OuterVolumeSpecName: "kube-api-access-2k5fj") pod "d9af277c-033a-4ab8-8473-6c5c2725fe8c" (UID: "d9af277c-033a-4ab8-8473-6c5c2725fe8c"). InnerVolumeSpecName "kube-api-access-2k5fj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.505354 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d9af277c-033a-4ab8-8473-6c5c2725fe8c" (UID: "d9af277c-033a-4ab8-8473-6c5c2725fe8c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596610 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9af277c-033a-4ab8-8473-6c5c2725fe8c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596642 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596651 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596661 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp9r2\" (UniqueName: \"kubernetes.io/projected/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-kube-api-access-zp9r2\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596672 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k5fj\" (UniqueName: \"kubernetes.io/projected/d9af277c-033a-4ab8-8473-6c5c2725fe8c-kube-api-access-2k5fj\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596680 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d9af277c-033a-4ab8-8473-6c5c2725fe8c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596688 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596696 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.596703 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/716e1b3f-7c40-4d82-a8af-fd8d88b2391e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:09 crc kubenswrapper[4663]: I1212 14:07:09.958105 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" podUID="b3f05375-5727-4536-8d9e-541cdb88127a" containerName="oauth-openshift" containerID="cri-o://d3d96a5d7bb7984387143e22f43ccbdfa53bbcdc08d6b15a0de7aaa89b5ddb2a" gracePeriod=15 Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.164955 4663 generic.go:334] "Generic (PLEG): container finished" podID="b3f05375-5727-4536-8d9e-541cdb88127a" containerID="d3d96a5d7bb7984387143e22f43ccbdfa53bbcdc08d6b15a0de7aaa89b5ddb2a" exitCode=0 Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.165013 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" event={"ID":"b3f05375-5727-4536-8d9e-541cdb88127a","Type":"ContainerDied","Data":"d3d96a5d7bb7984387143e22f43ccbdfa53bbcdc08d6b15a0de7aaa89b5ddb2a"} Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.166072 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" event={"ID":"d9af277c-033a-4ab8-8473-6c5c2725fe8c","Type":"ContainerDied","Data":"7b3e5d12f02e873602128a8785877734931bce143131551a6aacac35c4c998b3"} Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.166094 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.166124 4663 scope.go:117] "RemoveContainer" containerID="ef4ae975419721136ec6875f05a78bf27f016f776019fa87872b31f38fdcbf70" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.168669 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" event={"ID":"716e1b3f-7c40-4d82-a8af-fd8d88b2391e","Type":"ContainerDied","Data":"a53a4e1e9e5a7c8e6dce17ca059dc5bff6109bd9af4217f8e4a4025f55824599"} Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.168733 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.181486 4663 scope.go:117] "RemoveContainer" containerID="26e023d890271a8587cf4e96569b2dd95725062f887d802d5b1ff6f402604ba5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.191235 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.193165 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6b5cb4ccdf-8kp7w"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.197494 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.200773 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d8cd554c5-rfbrs"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.243112 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303569 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303618 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303638 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303657 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303723 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303760 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303783 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxgm4\" (UniqueName: \"kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303814 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303850 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303864 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303884 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303908 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.303935 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.304407 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs\") pod \"b3f05375-5727-4536-8d9e-541cdb88127a\" (UID: \"b3f05375-5727-4536-8d9e-541cdb88127a\") " Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.304882 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.304922 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.305258 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.305487 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.305557 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.308991 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.310718 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311258 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311303 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4" (OuterVolumeSpecName: "kube-api-access-pxgm4") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "kube-api-access-pxgm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311457 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311587 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311714 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.310549 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.311886 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "b3f05375-5727-4536-8d9e-541cdb88127a" (UID: "b3f05375-5727-4536-8d9e-541cdb88127a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405654 4663 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b3f05375-5727-4536-8d9e-541cdb88127a-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405686 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405698 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxgm4\" (UniqueName: \"kubernetes.io/projected/b3f05375-5727-4536-8d9e-541cdb88127a-kube-api-access-pxgm4\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405733 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405757 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405769 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405779 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405801 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405810 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405818 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405825 4663 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b3f05375-5727-4536-8d9e-541cdb88127a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405833 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405842 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.405850 4663 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b3f05375-5727-4536-8d9e-541cdb88127a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.875119 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" path="/var/lib/kubelet/pods/716e1b3f-7c40-4d82-a8af-fd8d88b2391e/volumes" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.875583 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" path="/var/lib/kubelet/pods/d9af277c-033a-4ab8-8473-6c5c2725fe8c/volumes" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.879953 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:10 crc kubenswrapper[4663]: E1212 14:07:10.880123 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" containerName="route-controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.880138 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" containerName="route-controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: E1212 14:07:10.880154 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3f05375-5727-4536-8d9e-541cdb88127a" containerName="oauth-openshift" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.880161 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3f05375-5727-4536-8d9e-541cdb88127a" containerName="oauth-openshift" Dec 12 14:07:10 crc kubenswrapper[4663]: E1212 14:07:10.880175 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" containerName="controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.880181 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" containerName="controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.887934 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3f05375-5727-4536-8d9e-541cdb88127a" containerName="oauth-openshift" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.887989 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9af277c-033a-4ab8-8473-6c5c2725fe8c" containerName="controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.888003 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="716e1b3f-7c40-4d82-a8af-fd8d88b2391e" containerName="route-controller-manager" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.888494 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.890225 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.891153 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.895077 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.895278 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.895375 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.895546 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.895910 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.896195 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.896948 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.897541 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.897921 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.898475 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899022 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899297 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899376 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899486 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899619 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.899758 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.902923 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.906894 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w"] Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911091 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-error\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911211 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-dir\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911297 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd7k7\" (UniqueName: \"kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911376 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911451 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-service-ca\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911514 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-router-certs\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911678 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911826 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.911956 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912056 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912148 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tzqn\" (UniqueName: \"kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: 
I1212 14:07:10.912277 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912567 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mzct\" (UniqueName: \"kubernetes.io/projected/b8b05ef1-0462-4a58-a4a7-be524f998999-kube-api-access-8mzct\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912698 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912856 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.912960 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-policies\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913161 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913332 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913466 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913630 
4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913791 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-login\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.913902 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-session\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:10 crc kubenswrapper[4663]: I1212 14:07:10.914027 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015248 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tzqn\" (UniqueName: \"kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015284 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015321 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mzct\" (UniqueName: \"kubernetes.io/projected/b8b05ef1-0462-4a58-a4a7-be524f998999-kube-api-access-8mzct\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015344 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: 
I1212 14:07:11.015367 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015382 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-policies\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015422 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015437 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015452 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015488 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015506 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-login\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015524 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-session\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015564 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015586 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-error\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015603 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd7k7\" (UniqueName: \"kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015617 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-dir\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015657 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015677 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-service-ca\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015692 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-router-certs\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015721 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015739 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015776 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.015804 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.016116 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.016381 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.016612 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-dir\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.016765 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.016829 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.017140 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " 
pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.017336 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-service-ca\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.017498 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-audit-policies\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.017664 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.019088 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020010 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-error\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020098 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020203 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020227 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 
14:07:11.020358 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-router-certs\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020448 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020449 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020481 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-user-template-login\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.020633 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.021332 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b8b05ef1-0462-4a58-a4a7-be524f998999-v4-0-config-system-session\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.027886 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tzqn\" (UniqueName: \"kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn\") pod \"controller-manager-d98c8bb58-vt5l5\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.027919 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mzct\" (UniqueName: \"kubernetes.io/projected/b8b05ef1-0462-4a58-a4a7-be524f998999-kube-api-access-8mzct\") pod \"oauth-openshift-5584c6b7fb-9zp5w\" (UID: \"b8b05ef1-0462-4a58-a4a7-be524f998999\") " pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.029242 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jd7k7\" (UniqueName: \"kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7\") pod \"route-controller-manager-58d9fbf45-mb5rv\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.178021 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" event={"ID":"b3f05375-5727-4536-8d9e-541cdb88127a","Type":"ContainerDied","Data":"7b1ae78f142a657654543479a7b3fa7d6b7cdfce49c99a160951e2bac2cff2ad"} Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.178075 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5xjs7" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.178227 4663 scope.go:117] "RemoveContainer" containerID="d3d96a5d7bb7984387143e22f43ccbdfa53bbcdc08d6b15a0de7aaa89b5ddb2a" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.188963 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"] Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.190579 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5xjs7"] Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.209425 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.217273 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.220985 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.544177 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.579708 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:11 crc kubenswrapper[4663]: I1212 14:07:11.582624 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w"] Dec 12 14:07:11 crc kubenswrapper[4663]: W1212 14:07:11.585166 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87b7b699_68ca_4774_bcff_c3dec801aacf.slice/crio-bfdc29c93c3fad4a5efe00a4103bb33404ea23b366546afb7bee600ca3a72749 WatchSource:0}: Error finding container bfdc29c93c3fad4a5efe00a4103bb33404ea23b366546afb7bee600ca3a72749: Status 404 returned error can't find the container with id bfdc29c93c3fad4a5efe00a4103bb33404ea23b366546afb7bee600ca3a72749 Dec 12 14:07:11 crc kubenswrapper[4663]: W1212 14:07:11.586832 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8b05ef1_0462_4a58_a4a7_be524f998999.slice/crio-9213daf57bd8e4c651917c1533c3e02fabafc614fbd208fdf1e2fcbb645913f0 WatchSource:0}: Error finding container 9213daf57bd8e4c651917c1533c3e02fabafc614fbd208fdf1e2fcbb645913f0: Status 404 returned error can't find the container with id 9213daf57bd8e4c651917c1533c3e02fabafc614fbd208fdf1e2fcbb645913f0 Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.192083 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" event={"ID":"87b7b699-68ca-4774-bcff-c3dec801aacf","Type":"ContainerStarted","Data":"210279b8231caf05aeb338eb9a811a1d43b7e36a5e5c1dbd94961b040717f39a"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.192333 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.192347 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" event={"ID":"87b7b699-68ca-4774-bcff-c3dec801aacf","Type":"ContainerStarted","Data":"bfdc29c93c3fad4a5efe00a4103bb33404ea23b366546afb7bee600ca3a72749"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.194109 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" event={"ID":"b8b05ef1-0462-4a58-a4a7-be524f998999","Type":"ContainerStarted","Data":"f527ebd28af576773b3dc0b6690b0e6eccf6c5758f0801487b8b0d954540a655"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.194143 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" event={"ID":"b8b05ef1-0462-4a58-a4a7-be524f998999","Type":"ContainerStarted","Data":"9213daf57bd8e4c651917c1533c3e02fabafc614fbd208fdf1e2fcbb645913f0"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.194413 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 
14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.198856 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.198895 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" event={"ID":"c7e8b390-e9b0-4c74-bad7-56037a8d583b","Type":"ContainerStarted","Data":"bfaf475b362ccec24e67bdf524213f026b5ad3d9d13a149083329161953b67bc"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.198915 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" event={"ID":"c7e8b390-e9b0-4c74-bad7-56037a8d583b","Type":"ContainerStarted","Data":"38ae8050405dd696e9145a646d6126e4dfa0ba221ee1e567f7b769003a736891"} Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.199121 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.202337 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.204344 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.206663 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" podStartSLOduration=3.206653685 podStartE2EDuration="3.206653685s" podCreationTimestamp="2025-12-12 14:07:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:12.205298418 +0000 UTC m=+211.630716472" watchObservedRunningTime="2025-12-12 14:07:12.206653685 +0000 UTC m=+211.632071740" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.239392 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5584c6b7fb-9zp5w" podStartSLOduration=28.239379483 podStartE2EDuration="28.239379483s" podCreationTimestamp="2025-12-12 14:06:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:12.236493822 +0000 UTC m=+211.661911876" watchObservedRunningTime="2025-12-12 14:07:12.239379483 +0000 UTC m=+211.664797538" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.254527 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" podStartSLOduration=4.254510828 podStartE2EDuration="4.254510828s" podCreationTimestamp="2025-12-12 14:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:12.25090055 +0000 UTC m=+211.676318604" watchObservedRunningTime="2025-12-12 14:07:12.254510828 +0000 UTC m=+211.679928882" Dec 12 14:07:12 crc kubenswrapper[4663]: I1212 14:07:12.875100 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3f05375-5727-4536-8d9e-541cdb88127a" path="/var/lib/kubelet/pods/b3f05375-5727-4536-8d9e-541cdb88127a/volumes" Dec 12 14:07:25 
crc kubenswrapper[4663]: I1212 14:07:25.265568 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.266093 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-82xhz" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="registry-server" containerID="cri-o://fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35" gracePeriod=30 Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.272652 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.272974 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8gc87" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="registry-server" containerID="cri-o://ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885" gracePeriod=30 Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.277225 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.277345 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" containerID="cri-o://629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7" gracePeriod=30 Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.286104 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.286303 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ppb9f" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="registry-server" containerID="cri-o://3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696" gracePeriod=30 Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.293346 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.293787 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zlg8l" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="registry-server" containerID="cri-o://a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1" gracePeriod=30 Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.298490 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d9jff"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.299092 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.311257 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d9jff"] Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.363861 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcjsk\" (UniqueName: \"kubernetes.io/projected/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-kube-api-access-dcjsk\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.363914 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.363933 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.464556 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcjsk\" (UniqueName: \"kubernetes.io/projected/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-kube-api-access-dcjsk\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.464648 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.464668 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.465783 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.469979 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.479443 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcjsk\" (UniqueName: \"kubernetes.io/projected/675a4c10-36f1-4fd9-97be-bfb87fe63f9a-kube-api-access-dcjsk\") pod \"marketplace-operator-79b997595-d9jff\" (UID: \"675a4c10-36f1-4fd9-97be-bfb87fe63f9a\") " pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.618028 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.764984 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.873719 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4q8d4\" (UniqueName: \"kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4\") pod \"6fc61e95-424c-4039-92c6-2a07590312d6\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.873810 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities\") pod \"6fc61e95-424c-4039-92c6-2a07590312d6\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.873863 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content\") pod \"6fc61e95-424c-4039-92c6-2a07590312d6\" (UID: \"6fc61e95-424c-4039-92c6-2a07590312d6\") " Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.875063 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities" (OuterVolumeSpecName: "utilities") pod "6fc61e95-424c-4039-92c6-2a07590312d6" (UID: "6fc61e95-424c-4039-92c6-2a07590312d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.877589 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4" (OuterVolumeSpecName: "kube-api-access-4q8d4") pod "6fc61e95-424c-4039-92c6-2a07590312d6" (UID: "6fc61e95-424c-4039-92c6-2a07590312d6"). InnerVolumeSpecName "kube-api-access-4q8d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.912778 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fc61e95-424c-4039-92c6-2a07590312d6" (UID: "6fc61e95-424c-4039-92c6-2a07590312d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.927466 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.930999 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.933676 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.937514 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.975257 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.975283 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4q8d4\" (UniqueName: \"kubernetes.io/projected/6fc61e95-424c-4039-92c6-2a07590312d6-kube-api-access-4q8d4\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:25 crc kubenswrapper[4663]: I1212 14:07:25.975294 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fc61e95-424c-4039-92c6-2a07590312d6-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.050453 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d9jff"] Dec 12 14:07:26 crc kubenswrapper[4663]: W1212 14:07:26.053904 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod675a4c10_36f1_4fd9_97be_bfb87fe63f9a.slice/crio-237f292275ba3b3506a88a1ec45ec3950f698f2dd470585a0c81dce191707003 WatchSource:0}: Error finding container 237f292275ba3b3506a88a1ec45ec3950f698f2dd470585a0c81dce191707003: Status 404 returned error can't find the container with id 237f292275ba3b3506a88a1ec45ec3950f698f2dd470585a0c81dce191707003 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076614 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content\") pod \"34cf2033-292d-493b-803a-d0171fae07d3\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076647 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content\") pod \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076680 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bxsb\" (UniqueName: \"kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb\") pod \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076705 4663 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqnmg\" (UniqueName: \"kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg\") pod \"34cf2033-292d-493b-803a-d0171fae07d3\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076769 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkwfk\" (UniqueName: \"kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk\") pod \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076798 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content\") pod \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076814 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics\") pod \"0b25f747-6011-4a3b-8058-bb1115a6880e\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076834 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities\") pod \"34cf2033-292d-493b-803a-d0171fae07d3\" (UID: \"34cf2033-292d-493b-803a-d0171fae07d3\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076861 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities\") pod \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\" (UID: \"7742e8b6-05a7-4a11-b883-7e5488fba6d9\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076883 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w8zb\" (UniqueName: \"kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb\") pod \"0b25f747-6011-4a3b-8058-bb1115a6880e\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076903 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca\") pod \"0b25f747-6011-4a3b-8058-bb1115a6880e\" (UID: \"0b25f747-6011-4a3b-8058-bb1115a6880e\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.076918 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities\") pod \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\" (UID: \"0620a18b-0f89-46ff-b112-0b28ee4d7d5a\") " Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.077644 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities" (OuterVolumeSpecName: "utilities") pod "0620a18b-0f89-46ff-b112-0b28ee4d7d5a" (UID: "0620a18b-0f89-46ff-b112-0b28ee4d7d5a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.078395 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "0b25f747-6011-4a3b-8058-bb1115a6880e" (UID: "0b25f747-6011-4a3b-8058-bb1115a6880e"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.078701 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities" (OuterVolumeSpecName: "utilities") pod "34cf2033-292d-493b-803a-d0171fae07d3" (UID: "34cf2033-292d-493b-803a-d0171fae07d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.080593 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "0b25f747-6011-4a3b-8058-bb1115a6880e" (UID: "0b25f747-6011-4a3b-8058-bb1115a6880e"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.081621 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb" (OuterVolumeSpecName: "kube-api-access-9w8zb") pod "0b25f747-6011-4a3b-8058-bb1115a6880e" (UID: "0b25f747-6011-4a3b-8058-bb1115a6880e"). InnerVolumeSpecName "kube-api-access-9w8zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.081914 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg" (OuterVolumeSpecName: "kube-api-access-fqnmg") pod "34cf2033-292d-493b-803a-d0171fae07d3" (UID: "34cf2033-292d-493b-803a-d0171fae07d3"). InnerVolumeSpecName "kube-api-access-fqnmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.081943 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb" (OuterVolumeSpecName: "kube-api-access-2bxsb") pod "0620a18b-0f89-46ff-b112-0b28ee4d7d5a" (UID: "0620a18b-0f89-46ff-b112-0b28ee4d7d5a"). InnerVolumeSpecName "kube-api-access-2bxsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.081955 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk" (OuterVolumeSpecName: "kube-api-access-qkwfk") pod "7742e8b6-05a7-4a11-b883-7e5488fba6d9" (UID: "7742e8b6-05a7-4a11-b883-7e5488fba6d9"). InnerVolumeSpecName "kube-api-access-qkwfk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.083668 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities" (OuterVolumeSpecName: "utilities") pod "7742e8b6-05a7-4a11-b883-7e5488fba6d9" (UID: "7742e8b6-05a7-4a11-b883-7e5488fba6d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.095462 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7742e8b6-05a7-4a11-b883-7e5488fba6d9" (UID: "7742e8b6-05a7-4a11-b883-7e5488fba6d9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.126626 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0620a18b-0f89-46ff-b112-0b28ee4d7d5a" (UID: "0620a18b-0f89-46ff-b112-0b28ee4d7d5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.166701 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34cf2033-292d-493b-803a-d0171fae07d3" (UID: "34cf2033-292d-493b-803a-d0171fae07d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178178 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178496 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178609 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178673 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bxsb\" (UniqueName: \"kubernetes.io/projected/0620a18b-0f89-46ff-b112-0b28ee4d7d5a-kube-api-access-2bxsb\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178757 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqnmg\" (UniqueName: \"kubernetes.io/projected/34cf2033-292d-493b-803a-d0171fae07d3-kube-api-access-fqnmg\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178850 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkwfk\" (UniqueName: \"kubernetes.io/projected/7742e8b6-05a7-4a11-b883-7e5488fba6d9-kube-api-access-qkwfk\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.178933 4663 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.179017 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.179094 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34cf2033-292d-493b-803a-d0171fae07d3-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.179150 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7742e8b6-05a7-4a11-b883-7e5488fba6d9-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.179231 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w8zb\" (UniqueName: \"kubernetes.io/projected/0b25f747-6011-4a3b-8058-bb1115a6880e-kube-api-access-9w8zb\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.179769 4663 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0b25f747-6011-4a3b-8058-bb1115a6880e-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.259365 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" event={"ID":"675a4c10-36f1-4fd9-97be-bfb87fe63f9a","Type":"ContainerStarted","Data":"8b4bc0ceac5a0236a3d270140cd26750cc37ffc800c33912b5d58b04fb0294cf"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.259403 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" event={"ID":"675a4c10-36f1-4fd9-97be-bfb87fe63f9a","Type":"ContainerStarted","Data":"237f292275ba3b3506a88a1ec45ec3950f698f2dd470585a0c81dce191707003"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.259581 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.261340 4663 generic.go:334] "Generic (PLEG): container finished" podID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerID="629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7" exitCode=0 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.261397 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" event={"ID":"0b25f747-6011-4a3b-8058-bb1115a6880e","Type":"ContainerDied","Data":"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.261420 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" event={"ID":"0b25f747-6011-4a3b-8058-bb1115a6880e","Type":"ContainerDied","Data":"3f384b0dd19a2bb370113ec5c9d29629bbed588decb26780b4956c7afc2e8066"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.261434 4663 scope.go:117] "RemoveContainer" containerID="629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7" Dec 12 
14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.261512 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xsrh2" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.264016 4663 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d9jff container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.264060 4663 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" podUID="675a4c10-36f1-4fd9-97be-bfb87fe63f9a" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.265898 4663 generic.go:334] "Generic (PLEG): container finished" podID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerID="3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696" exitCode=0 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.265954 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerDied","Data":"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.265978 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppb9f" event={"ID":"7742e8b6-05a7-4a11-b883-7e5488fba6d9","Type":"ContainerDied","Data":"482e4c89cd8eb6a06d25810cc94ca480d4d3d638454463d6b5e8aafed341fb50"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.266032 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppb9f" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.268829 4663 generic.go:334] "Generic (PLEG): container finished" podID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerID="ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885" exitCode=0 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.268858 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8gc87" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.268899 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8gc87" event={"ID":"0620a18b-0f89-46ff-b112-0b28ee4d7d5a","Type":"ContainerDied","Data":"ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.268925 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8gc87" event={"ID":"0620a18b-0f89-46ff-b112-0b28ee4d7d5a","Type":"ContainerDied","Data":"dbf9fcbf90933c89cf4a078821ee95da658d3f4ac217f2155be6bbd384924b28"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.270566 4663 generic.go:334] "Generic (PLEG): container finished" podID="34cf2033-292d-493b-803a-d0171fae07d3" containerID="a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1" exitCode=0 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.270643 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerDied","Data":"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.270664 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zlg8l" event={"ID":"34cf2033-292d-493b-803a-d0171fae07d3","Type":"ContainerDied","Data":"c5ca4f380eb5c0c407bda10d960878c84bfc1ae58fe92e29805b3d4d22a714b6"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.270858 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zlg8l" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.283504 4663 scope.go:117] "RemoveContainer" containerID="629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.284112 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7\": container with ID starting with 629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7 not found: ID does not exist" containerID="629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.284137 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7"} err="failed to get container status \"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7\": rpc error: code = NotFound desc = could not find container \"629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7\": container with ID starting with 629d836cf96d5bdb6fef06fd727646f8f4c240fd3dd49c80d108244aebd801c7 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.284157 4663 scope.go:117] "RemoveContainer" containerID="3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.287941 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" podStartSLOduration=1.2879249640000001 podStartE2EDuration="1.287924964s" podCreationTimestamp="2025-12-12 14:07:25 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:26.284627882 +0000 UTC m=+225.710045936" watchObservedRunningTime="2025-12-12 14:07:26.287924964 +0000 UTC m=+225.713343057" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.287996 4663 generic.go:334] "Generic (PLEG): container finished" podID="6fc61e95-424c-4039-92c6-2a07590312d6" containerID="fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35" exitCode=0 Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.288080 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerDied","Data":"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.288113 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-82xhz" event={"ID":"6fc61e95-424c-4039-92c6-2a07590312d6","Type":"ContainerDied","Data":"a52e2f27500ef68a72f3c1d503cfb1e06051c806782ee89ae29696eaceefddbf"} Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.288161 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-82xhz" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.310613 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.310708 4663 scope.go:117] "RemoveContainer" containerID="5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.314512 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xsrh2"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.322412 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.324808 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-82xhz"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.327837 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.328421 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppb9f"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.336377 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.338888 4663 scope.go:117] "RemoveContainer" containerID="3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.339918 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zlg8l"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.342205 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.343524 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8gc87"] Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.348641 4663 scope.go:117] 
"RemoveContainer" containerID="3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.348936 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696\": container with ID starting with 3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696 not found: ID does not exist" containerID="3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.348964 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696"} err="failed to get container status \"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696\": rpc error: code = NotFound desc = could not find container \"3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696\": container with ID starting with 3780b6f6389df86ea638c00b7fa60b003f68ccd3725c5387aac358b4f21fc696 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.348984 4663 scope.go:117] "RemoveContainer" containerID="5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.349482 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe\": container with ID starting with 5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe not found: ID does not exist" containerID="5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.349509 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe"} err="failed to get container status \"5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe\": rpc error: code = NotFound desc = could not find container \"5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe\": container with ID starting with 5f2c40a88c1e31396b4ecd24fac375a7dd2c3bc962d6489b6ac5eed3ca8bc1fe not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.349531 4663 scope.go:117] "RemoveContainer" containerID="3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.349812 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a\": container with ID starting with 3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a not found: ID does not exist" containerID="3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.349834 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a"} err="failed to get container status \"3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a\": rpc error: code = NotFound desc = could not find container \"3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a\": container with ID starting with 
3480eb4b70ac698471f539f6d36d9c42088c50d1ee858a24fb04bc187c34b58a not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.349848 4663 scope.go:117] "RemoveContainer" containerID="ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.360136 4663 scope.go:117] "RemoveContainer" containerID="a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.370562 4663 scope.go:117] "RemoveContainer" containerID="af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.382509 4663 scope.go:117] "RemoveContainer" containerID="ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.382832 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885\": container with ID starting with ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885 not found: ID does not exist" containerID="ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.382862 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885"} err="failed to get container status \"ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885\": rpc error: code = NotFound desc = could not find container \"ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885\": container with ID starting with ad66fe086449b227349ce752f00e4adbfc7d035608b20c57a1aa847215df9885 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.382958 4663 scope.go:117] "RemoveContainer" containerID="a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.383236 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af\": container with ID starting with a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af not found: ID does not exist" containerID="a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.383263 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af"} err="failed to get container status \"a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af\": rpc error: code = NotFound desc = could not find container \"a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af\": container with ID starting with a6a7a8afc1c9e917c24e6a2a4accac17e2683e8470c68424a53f2bc9828b46af not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.383299 4663 scope.go:117] "RemoveContainer" containerID="af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.383631 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3\": container 
with ID starting with af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3 not found: ID does not exist" containerID="af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.383652 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3"} err="failed to get container status \"af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3\": rpc error: code = NotFound desc = could not find container \"af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3\": container with ID starting with af427005327c2b0acdb38d7eda497f91e9d39d31d0321d8b7bfd2c66e1b39ac3 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.383665 4663 scope.go:117] "RemoveContainer" containerID="a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.410079 4663 scope.go:117] "RemoveContainer" containerID="28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.424370 4663 scope.go:117] "RemoveContainer" containerID="d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.437873 4663 scope.go:117] "RemoveContainer" containerID="a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.438228 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1\": container with ID starting with a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1 not found: ID does not exist" containerID="a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.438263 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1"} err="failed to get container status \"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1\": rpc error: code = NotFound desc = could not find container \"a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1\": container with ID starting with a2028c7ff64929df1465d77f8ecb20ceffc639b89e93bc33a889f76bfeda0ab1 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.438289 4663 scope.go:117] "RemoveContainer" containerID="28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.439139 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161\": container with ID starting with 28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161 not found: ID does not exist" containerID="28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.439169 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161"} err="failed to get container status \"28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161\": rpc error: 
code = NotFound desc = could not find container \"28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161\": container with ID starting with 28bd25742f604c2390172f45f56d1a9a0733d97f7361c98daf4c1b80a842c161 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.439188 4663 scope.go:117] "RemoveContainer" containerID="d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.441241 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9\": container with ID starting with d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9 not found: ID does not exist" containerID="d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.441274 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9"} err="failed to get container status \"d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9\": rpc error: code = NotFound desc = could not find container \"d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9\": container with ID starting with d4b287c3b63a008e70f5c4724a419a2d53aabcf0fe58d76503caf95da2f1f2f9 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.441296 4663 scope.go:117] "RemoveContainer" containerID="fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.452722 4663 scope.go:117] "RemoveContainer" containerID="69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.462753 4663 scope.go:117] "RemoveContainer" containerID="31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.472654 4663 scope.go:117] "RemoveContainer" containerID="fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.472901 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35\": container with ID starting with fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35 not found: ID does not exist" containerID="fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.472927 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35"} err="failed to get container status \"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35\": rpc error: code = NotFound desc = could not find container \"fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35\": container with ID starting with fe090883a3d2ca318cae44c94262bc86e8ddb4538a00e4249014c24be6a30c35 not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.472946 4663 scope.go:117] "RemoveContainer" containerID="69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.473197 4663 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd\": container with ID starting with 69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd not found: ID does not exist" containerID="69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.473229 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd"} err="failed to get container status \"69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd\": rpc error: code = NotFound desc = could not find container \"69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd\": container with ID starting with 69aa8ece4795ce54a834a331b4db44ee3098756c8131597b14871f27578f8efd not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.473252 4663 scope.go:117] "RemoveContainer" containerID="31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df" Dec 12 14:07:26 crc kubenswrapper[4663]: E1212 14:07:26.473669 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df\": container with ID starting with 31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df not found: ID does not exist" containerID="31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.473700 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df"} err="failed to get container status \"31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df\": rpc error: code = NotFound desc = could not find container \"31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df\": container with ID starting with 31739ffff46bb580b3851b162b305be7ef7c9baa05bb640ea02cf6ed111e14df not found: ID does not exist" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.875018 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" path="/var/lib/kubelet/pods/0620a18b-0f89-46ff-b112-0b28ee4d7d5a/volumes" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.875674 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" path="/var/lib/kubelet/pods/0b25f747-6011-4a3b-8058-bb1115a6880e/volumes" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.876102 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34cf2033-292d-493b-803a-d0171fae07d3" path="/var/lib/kubelet/pods/34cf2033-292d-493b-803a-d0171fae07d3/volumes" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.877106 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" path="/var/lib/kubelet/pods/6fc61e95-424c-4039-92c6-2a07590312d6/volumes" Dec 12 14:07:26 crc kubenswrapper[4663]: I1212 14:07:26.877616 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" path="/var/lib/kubelet/pods/7742e8b6-05a7-4a11-b883-7e5488fba6d9/volumes" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.299213 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d9jff" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477438 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6kscx"] Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477599 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477610 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477618 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477625 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477633 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477639 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477649 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477654 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477663 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477668 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477676 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477681 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477687 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477692 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477700 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477705 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477711 4663 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477716 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="extract-utilities" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477723 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477728 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477736 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477755 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="extract-content" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477762 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477767 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: E1212 14:07:27.477775 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477780 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477848 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="7742e8b6-05a7-4a11-b883-7e5488fba6d9" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477858 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fc61e95-424c-4039-92c6-2a07590312d6" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477864 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="34cf2033-292d-493b-803a-d0171fae07d3" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477871 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="0620a18b-0f89-46ff-b112-0b28ee4d7d5a" containerName="registry-server" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.477877 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b25f747-6011-4a3b-8058-bb1115a6880e" containerName="marketplace-operator" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.478477 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.481398 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.484326 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kscx"] Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.593137 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-utilities\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.593185 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-catalog-content\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.593218 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4ltm\" (UniqueName: \"kubernetes.io/projected/eecf200a-7ab0-4b4e-b914-ec2d18d44505-kube-api-access-p4ltm\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.676942 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4vt5n"] Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.678846 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.680310 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.684225 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4vt5n"] Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.695256 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-catalog-content\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.695321 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4ltm\" (UniqueName: \"kubernetes.io/projected/eecf200a-7ab0-4b4e-b914-ec2d18d44505-kube-api-access-p4ltm\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.695385 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-utilities\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.695699 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-catalog-content\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.695736 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eecf200a-7ab0-4b4e-b914-ec2d18d44505-utilities\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.712017 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4ltm\" (UniqueName: \"kubernetes.io/projected/eecf200a-7ab0-4b4e-b914-ec2d18d44505-kube-api-access-p4ltm\") pod \"redhat-marketplace-6kscx\" (UID: \"eecf200a-7ab0-4b4e-b914-ec2d18d44505\") " pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.795965 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.796526 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh48n\" (UniqueName: \"kubernetes.io/projected/2ded57f0-621c-4daa-b14c-4c3417835f5a-kube-api-access-qh48n\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.796593 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-catalog-content\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.796709 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-utilities\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.897293 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-utilities\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.897357 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh48n\" (UniqueName: \"kubernetes.io/projected/2ded57f0-621c-4daa-b14c-4c3417835f5a-kube-api-access-qh48n\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.897375 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-catalog-content\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.897926 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-catalog-content\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.898025 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ded57f0-621c-4daa-b14c-4c3417835f5a-utilities\") pod \"redhat-operators-4vt5n\" (UID: \"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.910952 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh48n\" (UniqueName: \"kubernetes.io/projected/2ded57f0-621c-4daa-b14c-4c3417835f5a-kube-api-access-qh48n\") pod \"redhat-operators-4vt5n\" (UID: 
\"2ded57f0-621c-4daa-b14c-4c3417835f5a\") " pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:27 crc kubenswrapper[4663]: I1212 14:07:27.990698 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.120831 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6kscx"] Dec 12 14:07:28 crc kubenswrapper[4663]: W1212 14:07:28.125356 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeecf200a_7ab0_4b4e_b914_ec2d18d44505.slice/crio-be4ef9a6a377a276f9e427eb3891ce94196e8cb3864c2ab46edce5dd23a2511c WatchSource:0}: Error finding container be4ef9a6a377a276f9e427eb3891ce94196e8cb3864c2ab46edce5dd23a2511c: Status 404 returned error can't find the container with id be4ef9a6a377a276f9e427eb3891ce94196e8cb3864c2ab46edce5dd23a2511c Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.309952 4663 generic.go:334] "Generic (PLEG): container finished" podID="eecf200a-7ab0-4b4e-b914-ec2d18d44505" containerID="122f5adb171a0220c55fd8ed381f04b4115e20e158f10036425916297ca947b5" exitCode=0 Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.310026 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kscx" event={"ID":"eecf200a-7ab0-4b4e-b914-ec2d18d44505","Type":"ContainerDied","Data":"122f5adb171a0220c55fd8ed381f04b4115e20e158f10036425916297ca947b5"} Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.310197 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kscx" event={"ID":"eecf200a-7ab0-4b4e-b914-ec2d18d44505","Type":"ContainerStarted","Data":"be4ef9a6a377a276f9e427eb3891ce94196e8cb3864c2ab46edce5dd23a2511c"} Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.324011 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4vt5n"] Dec 12 14:07:28 crc kubenswrapper[4663]: W1212 14:07:28.329041 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ded57f0_621c_4daa_b14c_4c3417835f5a.slice/crio-adace25d54e35085072c4a8b24bdd1adbf364df78d87324685534fae557fe0f6 WatchSource:0}: Error finding container adace25d54e35085072c4a8b24bdd1adbf364df78d87324685534fae557fe0f6: Status 404 returned error can't find the container with id adace25d54e35085072c4a8b24bdd1adbf364df78d87324685534fae557fe0f6 Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.990524 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:28 crc kubenswrapper[4663]: I1212 14:07:28.990877 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" podUID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" containerName="controller-manager" containerID="cri-o://bfaf475b362ccec24e67bdf524213f026b5ad3d9d13a149083329161953b67bc" gracePeriod=30 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.103951 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.104298 4663 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" podUID="87b7b699-68ca-4774-bcff-c3dec801aacf" containerName="route-controller-manager" containerID="cri-o://210279b8231caf05aeb338eb9a811a1d43b7e36a5e5c1dbd94961b040717f39a" gracePeriod=30 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.316072 4663 generic.go:334] "Generic (PLEG): container finished" podID="87b7b699-68ca-4774-bcff-c3dec801aacf" containerID="210279b8231caf05aeb338eb9a811a1d43b7e36a5e5c1dbd94961b040717f39a" exitCode=0 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.316156 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" event={"ID":"87b7b699-68ca-4774-bcff-c3dec801aacf","Type":"ContainerDied","Data":"210279b8231caf05aeb338eb9a811a1d43b7e36a5e5c1dbd94961b040717f39a"} Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.328668 4663 generic.go:334] "Generic (PLEG): container finished" podID="eecf200a-7ab0-4b4e-b914-ec2d18d44505" containerID="54fe9454a48d511b17bc0c6a56aa57bb9165fd98606ad6a98296de80a7c5159f" exitCode=0 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.328858 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kscx" event={"ID":"eecf200a-7ab0-4b4e-b914-ec2d18d44505","Type":"ContainerDied","Data":"54fe9454a48d511b17bc0c6a56aa57bb9165fd98606ad6a98296de80a7c5159f"} Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.332414 4663 generic.go:334] "Generic (PLEG): container finished" podID="2ded57f0-621c-4daa-b14c-4c3417835f5a" containerID="f0487d702e9ae486af1ef37b8a5aa2955cfebaa28caa3d95d7329ea62cb72489" exitCode=0 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.332463 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vt5n" event={"ID":"2ded57f0-621c-4daa-b14c-4c3417835f5a","Type":"ContainerDied","Data":"f0487d702e9ae486af1ef37b8a5aa2955cfebaa28caa3d95d7329ea62cb72489"} Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.332489 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vt5n" event={"ID":"2ded57f0-621c-4daa-b14c-4c3417835f5a","Type":"ContainerStarted","Data":"adace25d54e35085072c4a8b24bdd1adbf364df78d87324685534fae557fe0f6"} Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.343157 4663 generic.go:334] "Generic (PLEG): container finished" podID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" containerID="bfaf475b362ccec24e67bdf524213f026b5ad3d9d13a149083329161953b67bc" exitCode=0 Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.343215 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" event={"ID":"c7e8b390-e9b0-4c74-bad7-56037a8d583b","Type":"ContainerDied","Data":"bfaf475b362ccec24e67bdf524213f026b5ad3d9d13a149083329161953b67bc"} Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.455623 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.487822 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522670 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca\") pod \"87b7b699-68ca-4774-bcff-c3dec801aacf\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522708 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config\") pod \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522774 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd7k7\" (UniqueName: \"kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7\") pod \"87b7b699-68ca-4774-bcff-c3dec801aacf\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522804 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tzqn\" (UniqueName: \"kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn\") pod \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522835 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert\") pod \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522859 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles\") pod \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522883 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert\") pod \"87b7b699-68ca-4774-bcff-c3dec801aacf\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522911 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca\") pod \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\" (UID: \"c7e8b390-e9b0-4c74-bad7-56037a8d583b\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.522937 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config\") pod \"87b7b699-68ca-4774-bcff-c3dec801aacf\" (UID: \"87b7b699-68ca-4774-bcff-c3dec801aacf\") " Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.523511 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca" (OuterVolumeSpecName: "client-ca") pod "c7e8b390-e9b0-4c74-bad7-56037a8d583b" (UID: 
"c7e8b390-e9b0-4c74-bad7-56037a8d583b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.523655 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config" (OuterVolumeSpecName: "config") pod "87b7b699-68ca-4774-bcff-c3dec801aacf" (UID: "87b7b699-68ca-4774-bcff-c3dec801aacf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.523709 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca" (OuterVolumeSpecName: "client-ca") pod "87b7b699-68ca-4774-bcff-c3dec801aacf" (UID: "87b7b699-68ca-4774-bcff-c3dec801aacf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.524160 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c7e8b390-e9b0-4c74-bad7-56037a8d583b" (UID: "c7e8b390-e9b0-4c74-bad7-56037a8d583b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.524701 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config" (OuterVolumeSpecName: "config") pod "c7e8b390-e9b0-4c74-bad7-56037a8d583b" (UID: "c7e8b390-e9b0-4c74-bad7-56037a8d583b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.526930 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "87b7b699-68ca-4774-bcff-c3dec801aacf" (UID: "87b7b699-68ca-4774-bcff-c3dec801aacf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.527079 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn" (OuterVolumeSpecName: "kube-api-access-6tzqn") pod "c7e8b390-e9b0-4c74-bad7-56037a8d583b" (UID: "c7e8b390-e9b0-4c74-bad7-56037a8d583b"). InnerVolumeSpecName "kube-api-access-6tzqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.527182 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7" (OuterVolumeSpecName: "kube-api-access-jd7k7") pod "87b7b699-68ca-4774-bcff-c3dec801aacf" (UID: "87b7b699-68ca-4774-bcff-c3dec801aacf"). InnerVolumeSpecName "kube-api-access-jd7k7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.527462 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c7e8b390-e9b0-4c74-bad7-56037a8d583b" (UID: "c7e8b390-e9b0-4c74-bad7-56037a8d583b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624393 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624420 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624431 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd7k7\" (UniqueName: \"kubernetes.io/projected/87b7b699-68ca-4774-bcff-c3dec801aacf-kube-api-access-jd7k7\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624442 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tzqn\" (UniqueName: \"kubernetes.io/projected/c7e8b390-e9b0-4c74-bad7-56037a8d583b-kube-api-access-6tzqn\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624450 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7e8b390-e9b0-4c74-bad7-56037a8d583b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624458 4663 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624465 4663 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87b7b699-68ca-4774-bcff-c3dec801aacf-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624472 4663 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c7e8b390-e9b0-4c74-bad7-56037a8d583b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.624480 4663 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87b7b699-68ca-4774-bcff-c3dec801aacf-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.878617 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2rkr4"] Dec 12 14:07:29 crc kubenswrapper[4663]: E1212 14:07:29.880299 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" containerName="controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.880334 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" containerName="controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: E1212 14:07:29.880349 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87b7b699-68ca-4774-bcff-c3dec801aacf" containerName="route-controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.880355 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="87b7b699-68ca-4774-bcff-c3dec801aacf" containerName="route-controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.880465 4663 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" containerName="controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.880476 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="87b7b699-68ca-4774-bcff-c3dec801aacf" containerName="route-controller-manager" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.881136 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.882452 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.886355 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2rkr4"] Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.927225 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-utilities\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.927256 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtds8\" (UniqueName: \"kubernetes.io/projected/2d853a53-6f16-4db7-be39-c939984126e6-kube-api-access-dtds8\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:29 crc kubenswrapper[4663]: I1212 14:07:29.927304 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-catalog-content\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.027994 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-catalog-content\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.028068 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-utilities\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.028088 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtds8\" (UniqueName: \"kubernetes.io/projected/2d853a53-6f16-4db7-be39-c939984126e6-kube-api-access-dtds8\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.028407 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-catalog-content\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.028492 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d853a53-6f16-4db7-be39-c939984126e6-utilities\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.042107 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtds8\" (UniqueName: \"kubernetes.io/projected/2d853a53-6f16-4db7-be39-c939984126e6-kube-api-access-dtds8\") pod \"certified-operators-2rkr4\" (UID: \"2d853a53-6f16-4db7-be39-c939984126e6\") " pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.077632 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gf9rl"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.078440 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.079475 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.086077 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gf9rl"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.128524 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-catalog-content\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.128565 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-utilities\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.128609 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t68xw\" (UniqueName: \"kubernetes.io/projected/66941324-ab16-4783-a1db-140f14c7f4d2-kube-api-access-t68xw\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.198139 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.229447 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-utilities\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.229479 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-catalog-content\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.229527 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t68xw\" (UniqueName: \"kubernetes.io/projected/66941324-ab16-4783-a1db-140f14c7f4d2-kube-api-access-t68xw\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.230103 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-utilities\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.230301 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66941324-ab16-4783-a1db-140f14c7f4d2-catalog-content\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.244298 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t68xw\" (UniqueName: \"kubernetes.io/projected/66941324-ab16-4783-a1db-140f14c7f4d2-kube-api-access-t68xw\") pod \"community-operators-gf9rl\" (UID: \"66941324-ab16-4783-a1db-140f14c7f4d2\") " pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.348886 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" event={"ID":"c7e8b390-e9b0-4c74-bad7-56037a8d583b","Type":"ContainerDied","Data":"38ae8050405dd696e9145a646d6126e4dfa0ba221ee1e567f7b769003a736891"} Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.348910 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-d98c8bb58-vt5l5" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.349164 4663 scope.go:117] "RemoveContainer" containerID="bfaf475b362ccec24e67bdf524213f026b5ad3d9d13a149083329161953b67bc" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.350484 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.350509 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv" event={"ID":"87b7b699-68ca-4774-bcff-c3dec801aacf","Type":"ContainerDied","Data":"bfdc29c93c3fad4a5efe00a4103bb33404ea23b366546afb7bee600ca3a72749"} Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.357318 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6kscx" event={"ID":"eecf200a-7ab0-4b4e-b914-ec2d18d44505","Type":"ContainerStarted","Data":"782bd6a46ba73a96bf11ed9f8d0f90487d58c7807f552062b99e309a75fd16c5"} Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.361401 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vt5n" event={"ID":"2ded57f0-621c-4daa-b14c-4c3417835f5a","Type":"ContainerStarted","Data":"34bbe6644f2fa36fd5328e923328b573dd780652dccde571bd67895983c7972f"} Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.374349 4663 scope.go:117] "RemoveContainer" containerID="210279b8231caf05aeb338eb9a811a1d43b7e36a5e5c1dbd94961b040717f39a" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.376924 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6kscx" podStartSLOduration=1.6627460840000001 podStartE2EDuration="3.376914175s" podCreationTimestamp="2025-12-12 14:07:27 +0000 UTC" firstStartedPulling="2025-12-12 14:07:28.311861924 +0000 UTC m=+227.737279978" lastFinishedPulling="2025-12-12 14:07:30.026030016 +0000 UTC m=+229.451448069" observedRunningTime="2025-12-12 14:07:30.373813411 +0000 UTC m=+229.799231465" watchObservedRunningTime="2025-12-12 14:07:30.376914175 +0000 UTC m=+229.802332229" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.396814 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.400675 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-d98c8bb58-vt5l5"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.404564 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.404867 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.405421 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58d9fbf45-mb5rv"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.538164 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2rkr4"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.776834 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gf9rl"] Dec 12 14:07:30 crc kubenswrapper[4663]: W1212 14:07:30.783420 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66941324_ab16_4783_a1db_140f14c7f4d2.slice/crio-a5a9e1b1878aaae08030d37234cd0047e83596bd85dee5a50b2a78cb247f5779 WatchSource:0}: Error finding container a5a9e1b1878aaae08030d37234cd0047e83596bd85dee5a50b2a78cb247f5779: Status 404 returned error can't find the container with id a5a9e1b1878aaae08030d37234cd0047e83596bd85dee5a50b2a78cb247f5779 Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.877957 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87b7b699-68ca-4774-bcff-c3dec801aacf" path="/var/lib/kubelet/pods/87b7b699-68ca-4774-bcff-c3dec801aacf/volumes" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.878760 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7e8b390-e9b0-4c74-bad7-56037a8d583b" path="/var/lib/kubelet/pods/c7e8b390-e9b0-4c74-bad7-56037a8d583b/volumes" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.892373 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.892990 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.893810 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-76db56f75d-wfwsm"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.894292 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896140 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896232 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896344 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896394 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896551 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896620 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896722 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896814 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.896903 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.897404 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.897588 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.897712 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.899285 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.905777 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.910638 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76db56f75d-wfwsm"] Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935550 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-serving-cert\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935586 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-client-ca\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935631 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-proxy-ca-bundles\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935648 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c10433c-969a-49ae-9e65-f04e3ccded2c-serving-cert\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935762 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsdcb\" (UniqueName: \"kubernetes.io/projected/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-kube-api-access-fsdcb\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935783 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-config\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935826 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-client-ca\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935850 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-config\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:30 crc kubenswrapper[4663]: I1212 14:07:30.935870 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znvbh\" (UniqueName: \"kubernetes.io/projected/8c10433c-969a-49ae-9e65-f04e3ccded2c-kube-api-access-znvbh\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036753 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsdcb\" 
(UniqueName: \"kubernetes.io/projected/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-kube-api-access-fsdcb\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036793 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-config\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036815 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-client-ca\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036834 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-config\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036857 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znvbh\" (UniqueName: \"kubernetes.io/projected/8c10433c-969a-49ae-9e65-f04e3ccded2c-kube-api-access-znvbh\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036874 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-serving-cert\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036886 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-client-ca\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036906 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-proxy-ca-bundles\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.036919 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c10433c-969a-49ae-9e65-f04e3ccded2c-serving-cert\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: 
\"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.037777 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-client-ca\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.037886 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-config\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.038044 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-config\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.038270 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-client-ca\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.038927 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c10433c-969a-49ae-9e65-f04e3ccded2c-proxy-ca-bundles\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.043251 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c10433c-969a-49ae-9e65-f04e3ccded2c-serving-cert\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.044518 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-serving-cert\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.050073 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znvbh\" (UniqueName: \"kubernetes.io/projected/8c10433c-969a-49ae-9e65-f04e3ccded2c-kube-api-access-znvbh\") pod \"controller-manager-76db56f75d-wfwsm\" (UID: \"8c10433c-969a-49ae-9e65-f04e3ccded2c\") " pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.050164 4663 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fsdcb\" (UniqueName: \"kubernetes.io/projected/9cc6b6c8-5be6-4f60-8763-9fcef3705cbc-kube-api-access-fsdcb\") pod \"route-controller-manager-5d8cbd5547-lzc2z\" (UID: \"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc\") " pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.227738 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.232878 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.367505 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gf9rl" event={"ID":"66941324-ab16-4783-a1db-140f14c7f4d2","Type":"ContainerDied","Data":"ed33f977d0311c58caef310155ddbaeaac969d6053354534dc5856b7d538b4cd"} Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.367463 4663 generic.go:334] "Generic (PLEG): container finished" podID="66941324-ab16-4783-a1db-140f14c7f4d2" containerID="ed33f977d0311c58caef310155ddbaeaac969d6053354534dc5856b7d538b4cd" exitCode=0 Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.367616 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gf9rl" event={"ID":"66941324-ab16-4783-a1db-140f14c7f4d2","Type":"ContainerStarted","Data":"a5a9e1b1878aaae08030d37234cd0047e83596bd85dee5a50b2a78cb247f5779"} Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.369437 4663 generic.go:334] "Generic (PLEG): container finished" podID="2ded57f0-621c-4daa-b14c-4c3417835f5a" containerID="34bbe6644f2fa36fd5328e923328b573dd780652dccde571bd67895983c7972f" exitCode=0 Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.369583 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vt5n" event={"ID":"2ded57f0-621c-4daa-b14c-4c3417835f5a","Type":"ContainerDied","Data":"34bbe6644f2fa36fd5328e923328b573dd780652dccde571bd67895983c7972f"} Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.373780 4663 generic.go:334] "Generic (PLEG): container finished" podID="2d853a53-6f16-4db7-be39-c939984126e6" containerID="1f9a9be39ea47e22e20bfa8f491228086a27a671c65ac4a304a5dcc3f6c4637a" exitCode=0 Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.373841 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rkr4" event={"ID":"2d853a53-6f16-4db7-be39-c939984126e6","Type":"ContainerDied","Data":"1f9a9be39ea47e22e20bfa8f491228086a27a671c65ac4a304a5dcc3f6c4637a"} Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.373875 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rkr4" event={"ID":"2d853a53-6f16-4db7-be39-c939984126e6","Type":"ContainerStarted","Data":"cc8820725beca8bfeb373c314358351fe179912507d7bb767af8b03214b10347"} Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.554896 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z"] Dec 12 14:07:31 crc kubenswrapper[4663]: I1212 14:07:31.588931 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-76db56f75d-wfwsm"] Dec 12 14:07:31 crc 
kubenswrapper[4663]: W1212 14:07:31.594069 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c10433c_969a_49ae_9e65_f04e3ccded2c.slice/crio-5d2815a8b88fe7fcd26c1300fc67a10f36ca320dc05e44ee92bf7c389c0d3daf WatchSource:0}: Error finding container 5d2815a8b88fe7fcd26c1300fc67a10f36ca320dc05e44ee92bf7c389c0d3daf: Status 404 returned error can't find the container with id 5d2815a8b88fe7fcd26c1300fc67a10f36ca320dc05e44ee92bf7c389c0d3daf Dec 12 14:07:32 crc kubenswrapper[4663]: I1212 14:07:32.385771 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" event={"ID":"8c10433c-969a-49ae-9e65-f04e3ccded2c","Type":"ContainerStarted","Data":"5d2815a8b88fe7fcd26c1300fc67a10f36ca320dc05e44ee92bf7c389c0d3daf"} Dec 12 14:07:32 crc kubenswrapper[4663]: I1212 14:07:32.388355 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" event={"ID":"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc","Type":"ContainerStarted","Data":"68cff3ee4a014a9fb3b43fc2f2602b9b1ff9674a0777eb4c735c57b201cd2d2e"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.394017 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" event={"ID":"8c10433c-969a-49ae-9e65-f04e3ccded2c","Type":"ContainerStarted","Data":"736a58ccc9b2c7bc8948fb3a78f578de4e00bb44b38353e5d1da15c6f3b7c88c"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.394396 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.396959 4663 generic.go:334] "Generic (PLEG): container finished" podID="66941324-ab16-4783-a1db-140f14c7f4d2" containerID="33f51d44c95cdcba5732c2ab08266496c4e71c5664b768488b0e54256c5afeb8" exitCode=0 Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.397018 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gf9rl" event={"ID":"66941324-ab16-4783-a1db-140f14c7f4d2","Type":"ContainerDied","Data":"33f51d44c95cdcba5732c2ab08266496c4e71c5664b768488b0e54256c5afeb8"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.398134 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.407404 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vt5n" event={"ID":"2ded57f0-621c-4daa-b14c-4c3417835f5a","Type":"ContainerStarted","Data":"2c8d614dbc0401a5e2acdc51fdf020dc42dd993d7562a2f0c2e646051c21c982"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.412285 4663 generic.go:334] "Generic (PLEG): container finished" podID="2d853a53-6f16-4db7-be39-c939984126e6" containerID="dbd6876cb41afe00e06ce2175f3ab4da4342fbae9034e76a75464579a60043c0" exitCode=0 Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.412326 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rkr4" event={"ID":"2d853a53-6f16-4db7-be39-c939984126e6","Type":"ContainerDied","Data":"dbd6876cb41afe00e06ce2175f3ab4da4342fbae9034e76a75464579a60043c0"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.417459 4663 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" event={"ID":"9cc6b6c8-5be6-4f60-8763-9fcef3705cbc","Type":"ContainerStarted","Data":"ae6addbc77c5de0665b5c94c73b1c322e341f294819b92b79291f00d04ae7246"} Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.417647 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.421722 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-76db56f75d-wfwsm" podStartSLOduration=4.421709692 podStartE2EDuration="4.421709692s" podCreationTimestamp="2025-12-12 14:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:33.410810519 +0000 UTC m=+232.836228573" watchObservedRunningTime="2025-12-12 14:07:33.421709692 +0000 UTC m=+232.847127745" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.421965 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.449038 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4vt5n" podStartSLOduration=3.891432696 podStartE2EDuration="6.449025692s" podCreationTimestamp="2025-12-12 14:07:27 +0000 UTC" firstStartedPulling="2025-12-12 14:07:29.333803346 +0000 UTC m=+228.759221390" lastFinishedPulling="2025-12-12 14:07:31.891396331 +0000 UTC m=+231.316814386" observedRunningTime="2025-12-12 14:07:33.447499647 +0000 UTC m=+232.872917700" watchObservedRunningTime="2025-12-12 14:07:33.449025692 +0000 UTC m=+232.874443747" Dec 12 14:07:33 crc kubenswrapper[4663]: I1212 14:07:33.459045 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5d8cbd5547-lzc2z" podStartSLOduration=4.45902971 podStartE2EDuration="4.45902971s" podCreationTimestamp="2025-12-12 14:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:07:33.458827391 +0000 UTC m=+232.884245445" watchObservedRunningTime="2025-12-12 14:07:33.45902971 +0000 UTC m=+232.884447763" Dec 12 14:07:34 crc kubenswrapper[4663]: I1212 14:07:34.426001 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gf9rl" event={"ID":"66941324-ab16-4783-a1db-140f14c7f4d2","Type":"ContainerStarted","Data":"1022dcfdba88d4d2185ed9b4651189d39957275691b3a3d1b4d7cde936c47e7f"} Dec 12 14:07:34 crc kubenswrapper[4663]: I1212 14:07:34.428596 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2rkr4" event={"ID":"2d853a53-6f16-4db7-be39-c939984126e6","Type":"ContainerStarted","Data":"97eb3bc2a2b49d31538e8b85d53caefaf1d7ff3c08e7dd66efdc5252502b2449"} Dec 12 14:07:34 crc kubenswrapper[4663]: I1212 14:07:34.441076 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gf9rl" podStartSLOduration=1.7997223679999999 podStartE2EDuration="4.441063319s" podCreationTimestamp="2025-12-12 14:07:30 +0000 UTC" firstStartedPulling="2025-12-12 14:07:31.368735027 +0000 UTC m=+230.794153080" 
lastFinishedPulling="2025-12-12 14:07:34.010075976 +0000 UTC m=+233.435494031" observedRunningTime="2025-12-12 14:07:34.440152134 +0000 UTC m=+233.865570188" watchObservedRunningTime="2025-12-12 14:07:34.441063319 +0000 UTC m=+233.866481374" Dec 12 14:07:34 crc kubenswrapper[4663]: I1212 14:07:34.455218 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2rkr4" podStartSLOduration=2.794584001 podStartE2EDuration="5.455205402s" podCreationTimestamp="2025-12-12 14:07:29 +0000 UTC" firstStartedPulling="2025-12-12 14:07:31.374945943 +0000 UTC m=+230.800363997" lastFinishedPulling="2025-12-12 14:07:34.035567343 +0000 UTC m=+233.460985398" observedRunningTime="2025-12-12 14:07:34.453702799 +0000 UTC m=+233.879120854" watchObservedRunningTime="2025-12-12 14:07:34.455205402 +0000 UTC m=+233.880623456" Dec 12 14:07:37 crc kubenswrapper[4663]: I1212 14:07:37.796911 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:37 crc kubenswrapper[4663]: I1212 14:07:37.797198 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:37 crc kubenswrapper[4663]: I1212 14:07:37.831135 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:37 crc kubenswrapper[4663]: I1212 14:07:37.990998 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:37 crc kubenswrapper[4663]: I1212 14:07:37.991050 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:38 crc kubenswrapper[4663]: I1212 14:07:38.026157 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:38 crc kubenswrapper[4663]: I1212 14:07:38.472177 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4vt5n" Dec 12 14:07:38 crc kubenswrapper[4663]: I1212 14:07:38.472952 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6kscx" Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.033407 4663 file.go:109] "Unable to process watch event" err="can't process config file \"/etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml\": /etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml: couldn't parse as pod(Object 'Kind' is missing in 'null'), please check config file" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.034343 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035024 4663 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035146 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035458 4663 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035553 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035563 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035571 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035578 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035587 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035592 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035603 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035609 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035617 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035623 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035629 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035635 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035709 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035718 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035726 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035733 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035753 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035762 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.035833 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.035839 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.055913 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.129772 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.129811 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.129854 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.129897 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.129918 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.130090 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.130225 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.130249 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.198659 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2rkr4"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.198702 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2rkr4"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.228736 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2rkr4"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231506 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231541 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231570 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231590 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231607 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231634 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
(UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231665 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231664 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231681 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231700 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231737 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231754 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231764 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231738 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231796 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.231817 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.259061 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.355762 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.406343 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.406390 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.442403 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.462350 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7b9f6d62680101146d6e8caf6d245382796f86c7d264936318d61b4dca643397"} Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.466785 4663 generic.go:334] "Generic (PLEG): container finished" podID="8c7619fd-25f9-4050-8b13-3496ed370665" containerID="5b4addbe81d4764176e84d17d9596c900564d36e4b808f383c6260767d373c84" exitCode=0 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.466922 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c7619fd-25f9-4050-8b13-3496ed370665","Type":"ContainerDied","Data":"5b4addbe81d4764176e84d17d9596c900564d36e4b808f383c6260767d373c84"} Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.468676 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a" gracePeriod=15 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.468779 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c" gracePeriod=15 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.468767 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a" gracePeriod=15 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.468786 
4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50" gracePeriod=15 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.468857 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c" gracePeriod=15 Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.486797 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.504956 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gf9rl" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.506397 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.508385 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2rkr4" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.508711 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.508917 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:40 crc kubenswrapper[4663]: E1212 14:07:40.594762 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.26.204:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18807cf87b762854 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-12 14:07:40.594309204 +0000 UTC m=+240.019727259,LastTimestamp:2025-12-12 14:07:40.594309204 +0000 UTC m=+240.019727259,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.873191 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.873518 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:40 crc kubenswrapper[4663]: I1212 14:07:40.873686 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.472909 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"72b0f27c038ffa2d8e0f30152bd2d2c756f23f0727c4bb736f8b17dec787f9ab"}
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.473943 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.474299 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.474680 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.474974 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.475032 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.476157 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.476914 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c" exitCode=2
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.727864 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.728367 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.728658 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.729385 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.729680 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.847836 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock\") pod \"8c7619fd-25f9-4050-8b13-3496ed370665\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") "
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.847869 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir\") pod \"8c7619fd-25f9-4050-8b13-3496ed370665\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") "
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.847904 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access\") pod \"8c7619fd-25f9-4050-8b13-3496ed370665\" (UID: \"8c7619fd-25f9-4050-8b13-3496ed370665\") "
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.848080 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8c7619fd-25f9-4050-8b13-3496ed370665" (UID: "8c7619fd-25f9-4050-8b13-3496ed370665"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.848183 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock" (OuterVolumeSpecName: "var-lock") pod "8c7619fd-25f9-4050-8b13-3496ed370665" (UID: "8c7619fd-25f9-4050-8b13-3496ed370665"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.852944 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8c7619fd-25f9-4050-8b13-3496ed370665" (UID: "8c7619fd-25f9-4050-8b13-3496ed370665"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.949459 4663 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-var-lock\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.949485 4663 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c7619fd-25f9-4050-8b13-3496ed370665-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:41 crc kubenswrapper[4663]: I1212 14:07:41.949495 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7619fd-25f9-4050-8b13-3496ed370665-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.489977 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.491006 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.491556 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a" exitCode=0
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.491577 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c" exitCode=0
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.491587 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50" exitCode=0
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.491638 4663 scope.go:117] "RemoveContainer" containerID="115d48f255f010c8a2627939c2b551f1d0b8f8eb14e8fe7146ecacc0645a92f4"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.493333 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.493461 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"8c7619fd-25f9-4050-8b13-3496ed370665","Type":"ContainerDied","Data":"cf215106bddd754b2670354bb7ad5c36983b14ceeada8969de33a8e5c50d2030"}
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.493489 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf215106bddd754b2670354bb7ad5c36983b14ceeada8969de33a8e5c50d2030"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.520489 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.521002 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.521454 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.521769 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.753802 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.754788 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.755218 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.755539 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.755801 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.756016 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.756265 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758607 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758684 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758698 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758720 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758825 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.758940 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.759131 4663 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.759146 4663 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.759154 4663 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:07:42 crc kubenswrapper[4663]: I1212 14:07:42.874729 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.498926 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.499508 4663 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a" exitCode=0
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.499546 4663 scope.go:117] "RemoveContainer" containerID="324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.499568 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.499910 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.500527 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.500699 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.500937 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.502093 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.502465 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.502659 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.503471 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.503855 4663 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.504251 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.511733 4663 scope.go:117] "RemoveContainer" containerID="51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.521309 4663 scope.go:117] "RemoveContainer" containerID="1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.530457 4663 scope.go:117] "RemoveContainer" containerID="c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.538707 4663 scope.go:117] "RemoveContainer" containerID="355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.550730 4663 scope.go:117] "RemoveContainer" containerID="72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.562803 4663 scope.go:117] "RemoveContainer" containerID="324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a"
Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.563087 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\": container with ID starting with 324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a not found: ID does not exist" containerID="324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563124 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a"} err="failed to get container status \"324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\": rpc error: code = NotFound desc = could not find container \"324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a\": container with ID starting with 324c18c7cd4c8b2ad4e66c5c6b9f140ccd2abf661cb3de4ec6bdb161744d981a not found: ID does not exist"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563147 4663 scope.go:117] "RemoveContainer" containerID="51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c"
Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.563378 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\": container with ID starting with 51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c not found: ID does not exist" containerID="51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563421 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c"} err="failed to get container status \"51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\": rpc error: code = NotFound desc = could not find container \"51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c\": container with ID starting with 51825925630a54f2971867cefdf009a2a9b5e140173c3f8e0e23edd37bfc6b6c not found: ID does not exist"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563443 4663 scope.go:117] "RemoveContainer" containerID="1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50"
Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.563674 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\": container with ID starting with 1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50 not found: ID does not exist" containerID="1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563699 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50"} err="failed to get container status \"1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\": rpc error: code = NotFound desc = could not find container \"1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50\": container with ID starting with 1f2c3cdd007c14b4f6fe20f5d83604551c4c585fe46c96d8864f5d940de8bc50 not found: ID does not exist"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563715 4663 scope.go:117] "RemoveContainer" containerID="c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c"
Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.563956 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\": container with ID starting with c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c not found: ID does not exist" containerID="c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563983 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c"} err="failed to get container status \"c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\": rpc error: code = NotFound desc = could not find container \"c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c\": container with ID starting with c48d5e6c5ae67d8d9ea9107a6be487919e6d90f2494f2cccbf5c5839e808fe0c not found: ID does not exist"
Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.563998 4663 scope.go:117] "RemoveContainer" containerID="355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a"
Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.564211 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\": container with ID starting with 355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a not found: ID does not exist" containerID="355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a"
containerID="355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a" Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.564231 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a"} err="failed to get container status \"355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\": rpc error: code = NotFound desc = could not find container \"355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a\": container with ID starting with 355102e372ff0bb27d58696c10368ee248ba98b07fbbf94db4b92b9b032baf4a not found: ID does not exist" Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.564245 4663 scope.go:117] "RemoveContainer" containerID="72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3" Dec 12 14:07:43 crc kubenswrapper[4663]: E1212 14:07:43.564523 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\": container with ID starting with 72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3 not found: ID does not exist" containerID="72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3" Dec 12 14:07:43 crc kubenswrapper[4663]: I1212 14:07:43.564561 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3"} err="failed to get container status \"72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\": rpc error: code = NotFound desc = could not find container \"72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3\": container with ID starting with 72071244a39aa8b088529b0c48a421eab351ab0695a13b5f7883575017185ed3 not found: ID does not exist" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.191983 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.192481 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.192778 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.193026 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.193264 4663 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:45 crc kubenswrapper[4663]: I1212 14:07:45.193289 4663 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" 
err="failed 5 attempts to update lease" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.193512 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="200ms" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.394138 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="400ms" Dec 12 14:07:45 crc kubenswrapper[4663]: E1212 14:07:45.795520 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="800ms" Dec 12 14:07:46 crc kubenswrapper[4663]: E1212 14:07:46.596249 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="1.6s" Dec 12 14:07:48 crc kubenswrapper[4663]: E1212 14:07:48.197005 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="3.2s" Dec 12 14:07:49 crc kubenswrapper[4663]: E1212 14:07:49.875971 4663 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.26.204:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18807cf87b762854 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Created,Message:Created container startup-monitor,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-12 14:07:40.594309204 +0000 UTC m=+240.019727259,LastTimestamp:2025-12-12 14:07:40.594309204 +0000 UTC m=+240.019727259,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 12 14:07:50 crc kubenswrapper[4663]: I1212 14:07:50.870888 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:50 crc kubenswrapper[4663]: I1212 14:07:50.871805 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection 
refused" Dec 12 14:07:50 crc kubenswrapper[4663]: I1212 14:07:50.872140 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:50 crc kubenswrapper[4663]: I1212 14:07:50.872415 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:51 crc kubenswrapper[4663]: E1212 14:07:51.398197 4663 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.204:6443: connect: connection refused" interval="6.4s" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.869905 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.871128 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.872304 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.872650 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.872891 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.880432 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.880455 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:53 crc kubenswrapper[4663]: E1212 14:07:53.880704 4663 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:53 crc kubenswrapper[4663]: I1212 14:07:53.881061 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:53 crc kubenswrapper[4663]: W1212 14:07:53.894367 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-47ee726f197936101a37ad4b4e786050d4446c0ffd75ca1cd55d58508eef00e3 WatchSource:0}: Error finding container 47ee726f197936101a37ad4b4e786050d4446c0ffd75ca1cd55d58508eef00e3: Status 404 returned error can't find the container with id 47ee726f197936101a37ad4b4e786050d4446c0ffd75ca1cd55d58508eef00e3 Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541103 4663 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="3f02ed9c7244b925c3d490e32bc5f7ddeedc00fb8c0db219d5dd1e3727abe5bc" exitCode=0 Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541162 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"3f02ed9c7244b925c3d490e32bc5f7ddeedc00fb8c0db219d5dd1e3727abe5bc"} Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541313 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"47ee726f197936101a37ad4b4e786050d4446c0ffd75ca1cd55d58508eef00e3"} Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541521 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541532 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.541920 4663 status_manager.go:851] "Failed to get status for pod" podUID="66941324-ab16-4783-a1db-140f14c7f4d2" pod="openshift-marketplace/community-operators-gf9rl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gf9rl\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:54 crc kubenswrapper[4663]: E1212 14:07:54.541923 4663 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.542260 4663 status_manager.go:851] "Failed to get status for pod" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.542533 4663 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:54 crc kubenswrapper[4663]: I1212 14:07:54.542799 4663 status_manager.go:851] "Failed to get status for pod" podUID="2d853a53-6f16-4db7-be39-c939984126e6" pod="openshift-marketplace/certified-operators-2rkr4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-2rkr4\": dial tcp 192.168.26.204:6443: connect: connection refused" Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.547106 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.547304 4663 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293" exitCode=1 Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.547400 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.547813 4663 scope.go:117] "RemoveContainer" containerID="7ff2199afa6cc9d20dc20142876cd68eeb5c9cc694e798c202cb866e67c4c293" Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549700 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5192727608dd04adcc3b8b286b6370d903099bd779c3a253f909a99f0b91a04a"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549722 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c95a36245f3272595d6e0fe16e24d79f3ff3b546dfcc60c41249debc7c2baeeb"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549733 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"60c80b3bc226b4d5d658d6f2535d6ab329ed5e4ad002e6440f6733c2f2807335"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549742 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0e435a85cc3507598b462d721a8a92a2bb75345a4a9974a29e1483c47a37c5d7"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549764 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"710c62121f668a6956facca307174255fbd3d88205370b64c7988a64826828e2"} Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549897 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549926 4663 kubelet.go:1909] "Trying to delete pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:55 crc kubenswrapper[4663]: I1212 14:07:55.549938 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:07:56 crc kubenswrapper[4663]: I1212 14:07:56.486080 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:07:56 crc kubenswrapper[4663]: I1212 14:07:56.555333 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 12 14:07:56 crc kubenswrapper[4663]: I1212 14:07:56.555374 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6731583f5c1a10f116ffc348268a4fa83e9d01bf003a54231cecf097a9cb35ad"} Dec 12 14:07:58 crc kubenswrapper[4663]: I1212 14:07:58.881490 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:58 crc kubenswrapper[4663]: I1212 14:07:58.881680 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:58 crc kubenswrapper[4663]: I1212 14:07:58.884959 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:07:59 crc kubenswrapper[4663]: I1212 14:07:59.614276 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:07:59 crc kubenswrapper[4663]: I1212 14:07:59.617022 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:08:00 crc kubenswrapper[4663]: I1212 14:08:00.568443 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:08:00 crc kubenswrapper[4663]: I1212 14:08:00.679167 4663 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:08:00 crc kubenswrapper[4663]: I1212 14:08:00.879047 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fe9d3f10-ac44-4c93-b1a0-7f8c2dbe0aa4" Dec 12 14:08:01 crc kubenswrapper[4663]: I1212 14:08:01.571556 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:08:01 crc kubenswrapper[4663]: I1212 14:08:01.571979 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:08:01 crc kubenswrapper[4663]: I1212 14:08:01.573720 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fe9d3f10-ac44-4c93-b1a0-7f8c2dbe0aa4" Dec 12 14:08:01 crc kubenswrapper[4663]: I1212 14:08:01.574908 4663 status_manager.go:308] "Container readiness 
changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://710c62121f668a6956facca307174255fbd3d88205370b64c7988a64826828e2" Dec 12 14:08:01 crc kubenswrapper[4663]: I1212 14:08:01.574988 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:08:02 crc kubenswrapper[4663]: I1212 14:08:02.574778 4663 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:08:02 crc kubenswrapper[4663]: I1212 14:08:02.574804 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="804f0066-151b-4960-aa1e-fe6346e19f98" Dec 12 14:08:02 crc kubenswrapper[4663]: I1212 14:08:02.577660 4663 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fe9d3f10-ac44-4c93-b1a0-7f8c2dbe0aa4" Dec 12 14:08:06 crc kubenswrapper[4663]: I1212 14:08:06.489740 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 12 14:08:11 crc kubenswrapper[4663]: I1212 14:08:11.285964 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 12 14:08:11 crc kubenswrapper[4663]: I1212 14:08:11.410246 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 12 14:08:11 crc kubenswrapper[4663]: I1212 14:08:11.516010 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 12 14:08:11 crc kubenswrapper[4663]: I1212 14:08:11.517681 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.158396 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.378134 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.378957 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.482621 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.555969 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.567992 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.682441 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 12 14:08:12 crc kubenswrapper[4663]: I1212 14:08:12.761547 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 12 14:08:13 
crc kubenswrapper[4663]: I1212 14:08:13.109530 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.122243 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.136285 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.389982 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.582371 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.666688 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.907943 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 12 14:08:13 crc kubenswrapper[4663]: I1212 14:08:13.949052 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.042026 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.123972 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.248098 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.269014 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.448147 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.502965 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.525978 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.558038 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.768852 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.849052 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.851771 4663 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.862618 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.868202 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.923798 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.924866 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.930538 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 12 14:08:14 crc kubenswrapper[4663]: I1212 14:08:14.955294 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.021415 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.131092 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.144979 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.317712 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.340647 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.477171 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.478543 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.484170 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.542036 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.577163 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.584962 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.693829 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.758965 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 
14:08:15.930301 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.947508 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 12 14:08:15 crc kubenswrapper[4663]: I1212 14:08:15.968670 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.165362 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.216535 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.269849 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.306151 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.376263 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.475443 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.476920 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.560916 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.589013 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.610817 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.639542 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.724940 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.753554 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.762834 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.778644 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.820494 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 12 14:08:16 crc 
kubenswrapper[4663]: I1212 14:08:16.861707 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.918255 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.975665 4663 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.976566 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 12 14:08:16 crc kubenswrapper[4663]: I1212 14:08:16.977624 4663 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.008525 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.057842 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.075463 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.116503 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.152663 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.154254 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.156479 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.185906 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.186014 4663 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.193647 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.213899 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.234639 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.272783 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.309735 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 12 14:08:17 crc 
kubenswrapper[4663]: I1212 14:08:17.532067 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.548407 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.674288 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.680890 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.692021 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.764360 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.817262 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.887232 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.928368 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 12 14:08:17 crc kubenswrapper[4663]: I1212 14:08:17.940492 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.059518 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.195828 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.198367 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.206109 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.306283 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.395199 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.440985 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.488487 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.495949 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 
14:08:18.558471 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.583459 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.606346 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.893443 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.897861 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 12 14:08:18 crc kubenswrapper[4663]: I1212 14:08:18.936863 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.003326 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.005921 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.011246 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.047157 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.117779 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.160025 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.183443 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.309047 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.689464 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.735621 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.801570 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.819508 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.883984 4663 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 12 14:08:19 crc kubenswrapper[4663]: I1212 14:08:19.892801 4663 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.047496 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.061324 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.075136 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.091893 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.121846 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.163905 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.198908 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.208970 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.270970 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.321858 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.372024 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.469493 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.517223 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.522512 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.534336 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.650109 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.654928 4663 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.686037 4663 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.751099 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.813610 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.822579 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.882068 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 12 14:08:20 crc kubenswrapper[4663]: I1212 14:08:20.997635 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.026417 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.094474 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.193849 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.210556 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.236876 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.258797 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.270525 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.401201 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.527680 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.556626 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.570252 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.703688 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.774136 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.775719 4663 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.878208 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 12 14:08:21 crc kubenswrapper[4663]: I1212 14:08:21.898837 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.124074 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.146924 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.146965 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.232612 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.368344 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.511787 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.535988 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.560586 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.588306 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.649189 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.691881 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.704874 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.709802 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.772956 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.833423 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.869388 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 12 14:08:22 crc kubenswrapper[4663]: I1212 14:08:22.941239 4663 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"kube-root-ca.crt" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.124503 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.139602 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.155444 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.198626 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.226459 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.239587 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.273603 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.343474 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.352408 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.390339 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.425630 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.435890 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.445385 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.487297 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.688258 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.708651 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.767487 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 12 14:08:23 crc kubenswrapper[4663]: I1212 14:08:23.912257 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 12 14:08:24 crc kubenswrapper[4663]: 
I1212 14:08:24.135690 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.143788 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.220422 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.320273 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.390132 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.418119 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.461184 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.473052 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.485517 4663 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.488336 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=44.488322295 podStartE2EDuration="44.488322295s" podCreationTimestamp="2025-12-12 14:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:08:00.72476053 +0000 UTC m=+260.150178583" watchObservedRunningTime="2025-12-12 14:08:24.488322295 +0000 UTC m=+283.913740350" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.488724 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.488782 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.491582 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.500536 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.500522841 podStartE2EDuration="24.500522841s" podCreationTimestamp="2025-12-12 14:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:08:24.499798159 +0000 UTC m=+283.925216213" watchObservedRunningTime="2025-12-12 14:08:24.500522841 +0000 UTC m=+283.925940896" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.589816 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 
14:08:24.591726 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.603390 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.709841 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.813400 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.819030 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.953401 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 12 14:08:24 crc kubenswrapper[4663]: I1212 14:08:24.993349 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.034393 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.056194 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.081148 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.151447 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.336281 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.561578 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.580166 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.707164 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.852301 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.852358 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 12 14:08:25 crc kubenswrapper[4663]: I1212 14:08:25.997564 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.047928 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 
14:08:26.091405 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.142881 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.266739 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.430096 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.484915 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.516133 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.575220 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.705413 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 12 14:08:26 crc kubenswrapper[4663]: I1212 14:08:26.802261 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.210173 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.239046 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.362239 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.494432 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.608841 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.701974 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.733137 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 12 14:08:27 crc kubenswrapper[4663]: I1212 14:08:27.961878 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 12 14:08:28 crc kubenswrapper[4663]: I1212 14:08:28.184832 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 12 14:08:28 crc kubenswrapper[4663]: I1212 14:08:28.268116 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 12 14:08:28 crc kubenswrapper[4663]: I1212 14:08:28.479494 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 12 14:08:29 crc kubenswrapper[4663]: I1212 14:08:29.252277 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 12 14:08:30 crc kubenswrapper[4663]: I1212 14:08:30.084095 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 12 14:08:33 crc kubenswrapper[4663]: I1212 14:08:33.415924 4663 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 12 14:08:33 crc kubenswrapper[4663]: I1212 14:08:33.416138 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://72b0f27c038ffa2d8e0f30152bd2d2c756f23f0727c4bb736f8b17dec787f9ab" gracePeriod=5
Dec 12 14:08:38 crc kubenswrapper[4663]: I1212 14:08:38.715111 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 12 14:08:38 crc kubenswrapper[4663]: I1212 14:08:38.715280 4663 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="72b0f27c038ffa2d8e0f30152bd2d2c756f23f0727c4bb736f8b17dec787f9ab" exitCode=137
Dec 12 14:08:38 crc kubenswrapper[4663]: I1212 14:08:38.964438 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 12 14:08:38 crc kubenswrapper[4663]: I1212 14:08:38.964509 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129082 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129140 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129166 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129206 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129264 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129275 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129302 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129316 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129404 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129510 4663 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129523 4663 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129533 4663 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.129541 4663 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.134332 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.230141 4663 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.720896 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.721057 4663 scope.go:117] "RemoveContainer" containerID="72b0f27c038ffa2d8e0f30152bd2d2c756f23f0727c4bb736f8b17dec787f9ab"
Dec 12 14:08:39 crc kubenswrapper[4663]: I1212 14:08:39.721113 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.771632 4663 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.874319 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.874528 4663 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.882573 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.882603 4663 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c472f9a6-5ef9-49da-a02d-d04c2dbb1265"
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.885271 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 12 14:08:40 crc kubenswrapper[4663]: I1212 14:08:40.885298 4663 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c472f9a6-5ef9-49da-a02d-d04c2dbb1265"
Dec 12 14:09:06 crc kubenswrapper[4663]: I1212 14:09:06.129181 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 12 14:09:06 crc kubenswrapper[4663]: I1212 14:09:06.129567 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.757102 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8g69c"]
Dec 12 14:09:10 crc kubenswrapper[4663]: E1212 14:09:10.758159 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.758187 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 12 14:09:10 crc kubenswrapper[4663]: E1212 14:09:10.758202 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" containerName="installer"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.758208 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" containerName="installer"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.758292 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.758312 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c7619fd-25f9-4050-8b13-3496ed370665" containerName="installer"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.758637 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.764652 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8g69c"]
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.871876 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77nmz\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-kube-api-access-77nmz\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.871926 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc7fd694-837d-4be6-8c69-ecb61bd0475f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.871961 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.871987 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-bound-sa-token\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.872005 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-trusted-ca\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.872024 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-certificates\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.872060 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-tls\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.872190 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc7fd694-837d-4be6-8c69-ecb61bd0475f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.894161 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.973886 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-tls\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.973926 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc7fd694-837d-4be6-8c69-ecb61bd0475f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.973955 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77nmz\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-kube-api-access-77nmz\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.973984 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc7fd694-837d-4be6-8c69-ecb61bd0475f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.974021 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-bound-sa-token\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.974038 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-trusted-ca\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.974053 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-certificates\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.974368 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc7fd694-837d-4be6-8c69-ecb61bd0475f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.976253 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-trusted-ca\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.976785 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-certificates\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.978042 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc7fd694-837d-4be6-8c69-ecb61bd0475f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.978047 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-registry-tls\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.986956 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77nmz\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-kube-api-access-77nmz\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:10 crc kubenswrapper[4663]: I1212 14:09:10.987015 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc7fd694-837d-4be6-8c69-ecb61bd0475f-bound-sa-token\") pod \"image-registry-66df7c8f76-8g69c\" (UID: \"cc7fd694-837d-4be6-8c69-ecb61bd0475f\") " pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.070402 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.400417 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8g69c"]
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.843244 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c" event={"ID":"cc7fd694-837d-4be6-8c69-ecb61bd0475f","Type":"ContainerStarted","Data":"7bf57075b604da667ac3673bad6324484ab38c39d6c9f5bd1cf949eaaf48f7b8"}
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.843525 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.843555 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c" event={"ID":"cc7fd694-837d-4be6-8c69-ecb61bd0475f","Type":"ContainerStarted","Data":"9ca8010880d2ac14ee0f24c313e2145bede828fd23904ce8fb3f91739ca81757"}
Dec 12 14:09:11 crc kubenswrapper[4663]: I1212 14:09:11.858866 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c" podStartSLOduration=1.85885333 podStartE2EDuration="1.85885333s" podCreationTimestamp="2025-12-12 14:09:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:09:11.856716591 +0000 UTC m=+331.282134646" watchObservedRunningTime="2025-12-12 14:09:11.85885333 +0000 UTC m=+331.284271384"
Dec 12 14:09:31 crc kubenswrapper[4663]: I1212 14:09:31.074219 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-8g69c"
Dec 12 14:09:31 crc kubenswrapper[4663]: I1212 14:09:31.107165 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"]
Dec 12 14:09:36 crc kubenswrapper[4663]: I1212 14:09:36.128947 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 12 14:09:36 crc kubenswrapper[4663]: I1212 14:09:36.129168 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.131251 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" podUID="191340bd-661f-4987-b348-20887c35f540" containerName="registry" containerID="cri-o://81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e" gracePeriod=30
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.391933 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.559927 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560024 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560053 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560073 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560091 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560129 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560151 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcjlf\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560170 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted\") pod \"191340bd-661f-4987-b348-20887c35f540\" (UID: \"191340bd-661f-4987-b348-20887c35f540\") "
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560911 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.560926 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.564355 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf" (OuterVolumeSpecName: "kube-api-access-hcjlf") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "kube-api-access-hcjlf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.564846 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.565036 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.565160 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.568289 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.572981 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "191340bd-661f-4987-b348-20887c35f540" (UID: "191340bd-661f-4987-b348-20887c35f540"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.660961 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.660999 4663 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.661009 4663 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/191340bd-661f-4987-b348-20887c35f540-registry-certificates\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.661019 4663 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.661027 4663 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/191340bd-661f-4987-b348-20887c35f540-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.661034 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcjlf\" (UniqueName: \"kubernetes.io/projected/191340bd-661f-4987-b348-20887c35f540-kube-api-access-hcjlf\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:56 crc kubenswrapper[4663]: I1212 14:09:56.661041 4663 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/191340bd-661f-4987-b348-20887c35f540-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.005444 4663 generic.go:334] "Generic (PLEG): container finished" podID="191340bd-661f-4987-b348-20887c35f540" containerID="81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e" exitCode=0
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.005478 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" event={"ID":"191340bd-661f-4987-b348-20887c35f540","Type":"ContainerDied","Data":"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"}
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.005519 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ns242" event={"ID":"191340bd-661f-4987-b348-20887c35f540","Type":"ContainerDied","Data":"8aa6692b218cebfd63748b4934c162028e95b4b079adc7925f49809bae16ace6"}
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.005537 4663 scope.go:117] "RemoveContainer" containerID="81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.005794 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ns242"
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.016771 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"]
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.018575 4663 scope.go:117] "RemoveContainer" containerID="81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"
Dec 12 14:09:57 crc kubenswrapper[4663]: E1212 14:09:57.018987 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e\": container with ID starting with 81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e not found: ID does not exist" containerID="81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.019070 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e"} err="failed to get container status \"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e\": rpc error: code = NotFound desc = could not find container \"81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e\": container with ID starting with 81e073cf23a7dadc0eb65e79b8749938cbf6d5ca142226972cb991419089b01e not found: ID does not exist"
Dec 12 14:09:57 crc kubenswrapper[4663]: I1212 14:09:57.019544 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ns242"]
Dec 12 14:09:58 crc kubenswrapper[4663]: I1212 14:09:58.874944 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="191340bd-661f-4987-b348-20887c35f540" path="/var/lib/kubelet/pods/191340bd-661f-4987-b348-20887c35f540/volumes"
Dec 12 14:10:06 crc kubenswrapper[4663]: I1212 14:10:06.128930 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 12 14:10:06 crc kubenswrapper[4663]: I1212 14:10:06.129291 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 12 14:10:06 crc kubenswrapper[4663]: I1212 14:10:06.129327 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2"
Dec 12 14:10:06 crc kubenswrapper[4663]: I1212 14:10:06.129774 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 12 14:10:06 crc kubenswrapper[4663]: I1212 14:10:06.129822 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae" gracePeriod=600
Dec 12 14:10:07 crc kubenswrapper[4663]: I1212 14:10:07.046088 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae" exitCode=0
Dec 12 14:10:07 crc kubenswrapper[4663]: I1212 14:10:07.046165 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae"}
Dec 12 14:10:07 crc kubenswrapper[4663]: I1212 14:10:07.046446 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9"}
Dec 12 14:10:07 crc kubenswrapper[4663]: I1212 14:10:07.046466 4663 scope.go:117] "RemoveContainer" containerID="f26449d21ac62d81fa3cfd7d6a9c239f2aa575a5a8e6509da5047bd20b7938d5"
Dec 12 14:12:06 crc kubenswrapper[4663]: I1212 14:12:06.128988 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 12 14:12:06 crc kubenswrapper[4663]: I1212 14:12:06.129505 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 12 14:12:36 crc kubenswrapper[4663]: I1212 14:12:36.128874 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 12 14:12:36 crc kubenswrapper[4663]: I1212 14:12:36.129247 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.171183 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-h4msx"]
Dec 12 14:12:48 crc kubenswrapper[4663]: E1212 14:12:48.171699 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="191340bd-661f-4987-b348-20887c35f540" containerName="registry"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.171711 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="191340bd-661f-4987-b348-20887c35f540" containerName="registry"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.171805 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="191340bd-661f-4987-b348-20887c35f540" containerName="registry"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.172103 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.174128 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.174336 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-qlfkk"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.175195 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.179767 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-h4msx"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.181714 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-647lg\" (UniqueName: \"kubernetes.io/projected/ba4281a0-b789-4914-8ad6-aeea0b349c7b-kube-api-access-647lg\") pod \"cert-manager-cainjector-7f985d654d-h4msx\" (UID: \"ba4281a0-b789-4914-8ad6-aeea0b349c7b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.182847 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-tw7gw"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.183471 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-tw7gw"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.184894 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-924cn"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.185231 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.187890 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.189428 4663 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6f2pp"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.192784 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.198151 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-tw7gw"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.283357 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-647lg\" (UniqueName: \"kubernetes.io/projected/ba4281a0-b789-4914-8ad6-aeea0b349c7b-kube-api-access-647lg\") pod \"cert-manager-cainjector-7f985d654d-h4msx\" (UID: \"ba4281a0-b789-4914-8ad6-aeea0b349c7b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.283431 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4995h\" (UniqueName: \"kubernetes.io/projected/06657a47-365d-4912-9444-21c7ead0ee4a-kube-api-access-4995h\") pod \"cert-manager-5b446d88c5-tw7gw\" (UID: \"06657a47-365d-4912-9444-21c7ead0ee4a\") " pod="cert-manager/cert-manager-5b446d88c5-tw7gw"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.283465 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsdtt\" (UniqueName: \"kubernetes.io/projected/8faa1738-9a0b-4e74-99ad-c7ea18277b98-kube-api-access-xsdtt\") pod \"cert-manager-webhook-5655c58dd6-lgmg9\" (UID: \"8faa1738-9a0b-4e74-99ad-c7ea18277b98\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.297904 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-647lg\" (UniqueName: \"kubernetes.io/projected/ba4281a0-b789-4914-8ad6-aeea0b349c7b-kube-api-access-647lg\") pod \"cert-manager-cainjector-7f985d654d-h4msx\" (UID: \"ba4281a0-b789-4914-8ad6-aeea0b349c7b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.383944 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4995h\" (UniqueName: \"kubernetes.io/projected/06657a47-365d-4912-9444-21c7ead0ee4a-kube-api-access-4995h\") pod \"cert-manager-5b446d88c5-tw7gw\" (UID: \"06657a47-365d-4912-9444-21c7ead0ee4a\") " pod="cert-manager/cert-manager-5b446d88c5-tw7gw"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.383995 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsdtt\" (UniqueName: \"kubernetes.io/projected/8faa1738-9a0b-4e74-99ad-c7ea18277b98-kube-api-access-xsdtt\") pod \"cert-manager-webhook-5655c58dd6-lgmg9\" (UID: \"8faa1738-9a0b-4e74-99ad-c7ea18277b98\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.396483 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4995h\" (UniqueName: \"kubernetes.io/projected/06657a47-365d-4912-9444-21c7ead0ee4a-kube-api-access-4995h\") pod \"cert-manager-5b446d88c5-tw7gw\" (UID: \"06657a47-365d-4912-9444-21c7ead0ee4a\") " pod="cert-manager/cert-manager-5b446d88c5-tw7gw"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.397306 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsdtt\" (UniqueName: \"kubernetes.io/projected/8faa1738-9a0b-4e74-99ad-c7ea18277b98-kube-api-access-xsdtt\") pod \"cert-manager-webhook-5655c58dd6-lgmg9\" (UID: \"8faa1738-9a0b-4e74-99ad-c7ea18277b98\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.483259 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.493387 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-tw7gw"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.500258 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.673809 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-tw7gw"]
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.681618 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.699270 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"]
Dec 12 14:12:48 crc kubenswrapper[4663]: W1212 14:12:48.701513 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8faa1738_9a0b_4e74_99ad_c7ea18277b98.slice/crio-a5af99a7578bb14e956b9310830ab7b4da9aeae66c4dc7c2974b21dee06acc1b WatchSource:0}: Error finding container a5af99a7578bb14e956b9310830ab7b4da9aeae66c4dc7c2974b21dee06acc1b: Status 404 returned error can't find the container with id a5af99a7578bb14e956b9310830ab7b4da9aeae66c4dc7c2974b21dee06acc1b
Dec 12 14:12:48 crc kubenswrapper[4663]: I1212 14:12:48.832680 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-h4msx"]
Dec 12 14:12:48 crc kubenswrapper[4663]: W1212 14:12:48.835336 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba4281a0_b789_4914_8ad6_aeea0b349c7b.slice/crio-1a2ddba1ab446b2c70414c1b23439d73626aeb1a4cfb14e9bf6de94a23c9186b WatchSource:0}: Error finding container 1a2ddba1ab446b2c70414c1b23439d73626aeb1a4cfb14e9bf6de94a23c9186b: Status 404 returned error can't find the container with id 1a2ddba1ab446b2c70414c1b23439d73626aeb1a4cfb14e9bf6de94a23c9186b
Dec 12 14:12:49 crc kubenswrapper[4663]: I1212 14:12:49.626710 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx" event={"ID":"ba4281a0-b789-4914-8ad6-aeea0b349c7b","Type":"ContainerStarted","Data":"1a2ddba1ab446b2c70414c1b23439d73626aeb1a4cfb14e9bf6de94a23c9186b"}
Dec 12 14:12:49 crc kubenswrapper[4663]: I1212 14:12:49.627803 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9" event={"ID":"8faa1738-9a0b-4e74-99ad-c7ea18277b98","Type":"ContainerStarted","Data":"a5af99a7578bb14e956b9310830ab7b4da9aeae66c4dc7c2974b21dee06acc1b"}
Dec 12 14:12:49 crc kubenswrapper[4663]: I1212 14:12:49.628539 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-tw7gw" event={"ID":"06657a47-365d-4912-9444-21c7ead0ee4a","Type":"ContainerStarted","Data":"28dca5aad9d1760f7533dfcecd25d2bdffecc97f7a21e774c71ddd601bb7607e"}
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.637148 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx" event={"ID":"ba4281a0-b789-4914-8ad6-aeea0b349c7b","Type":"ContainerStarted","Data":"b7ea94380d84b2d6910a7cec33afcbb1ddc05d99511561f9d23d564bdef57339"}
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.638473 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9" event={"ID":"8faa1738-9a0b-4e74-99ad-c7ea18277b98","Type":"ContainerStarted","Data":"775137d9e4d00bca7c044580ad03bc9806bf4839649d44de3bf7b414a5c7f222"}
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.638614 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.640046 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-tw7gw" event={"ID":"06657a47-365d-4912-9444-21c7ead0ee4a","Type":"ContainerStarted","Data":"0aaa7f85de9c200e2a73cd000d7053e6c6273541c84a1e446f59b9e0fe00955d"}
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.648846 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-h4msx" podStartSLOduration=1.366803945 podStartE2EDuration="3.648837179s" podCreationTimestamp="2025-12-12 14:12:48 +0000 UTC" firstStartedPulling="2025-12-12 14:12:48.83723743 +0000 UTC m=+548.262655484" lastFinishedPulling="2025-12-12 14:12:51.119270664 +0000 UTC m=+550.544688718" observedRunningTime="2025-12-12 14:12:51.646431229 +0000 UTC m=+551.071849283" watchObservedRunningTime="2025-12-12 14:12:51.648837179 +0000 UTC m=+551.074255234"
Dec 12 14:12:51 crc kubenswrapper[4663]: I1212 14:12:51.655726 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-tw7gw" podStartSLOduration=1.192067428 podStartE2EDuration="3.655719248s" podCreationTimestamp="2025-12-12 14:12:48 +0000 UTC" firstStartedPulling="2025-12-12 14:12:48.681427271 +0000 UTC m=+548.106845326" lastFinishedPulling="2025-12-12 14:12:51.145079092 +0000 UTC m=+550.570497146" observedRunningTime="2025-12-12 14:12:51.65542299 +0000 UTC m=+551.080841043" watchObservedRunningTime="2025-12-12 14:12:51.655719248 +0000 UTC m=+551.081137302"
Dec 12 14:12:58 crc kubenswrapper[4663]: I1212 14:12:58.503803 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9"
Dec 12 14:12:58 crc kubenswrapper[4663]: I1212 14:12:58.513962 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-lgmg9" podStartSLOduration=8.081431216 podStartE2EDuration="10.513951261s" podCreationTimestamp="2025-12-12 14:12:48 +0000 UTC" firstStartedPulling="2025-12-12 14:12:48.703245991 +0000 UTC m=+548.128664045" lastFinishedPulling="2025-12-12 14:12:51.135766036 +0000 UTC m=+550.561184090" observedRunningTime="2025-12-12 14:12:51.667685021 +0000 UTC m=+551.093103075" watchObservedRunningTime="2025-12-12 14:12:58.513951261 +0000 UTC m=+557.939369316"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.568961 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rz99s"]
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569266 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-controller" containerID="cri-o://a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569369 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="northd" containerID="cri-o://3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569355 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="nbdb" containerID="cri-o://ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569407 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569460 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-node" containerID="cri-o://d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569489 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-acl-logging" containerID="cri-o://4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.569669 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="sbdb" containerID="cri-o://d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.595276 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller" containerID="cri-o://6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" gracePeriod=30
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.668583 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/2.log"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.668935 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/1.log"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.668971 4663 generic.go:334] "Generic (PLEG): container finished" podID="262620a9-ff94-42e0-a449-6c1a022f9b77" containerID="944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53" exitCode=2
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.668994 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerDied","Data":"944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53"}
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.669024 4663 scope.go:117] "RemoveContainer" containerID="f942609b2331fcb3eb4802c49440216bb54c0330ed206fa9159a09d32a1a0e09"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.669413 4663 scope.go:117] "RemoveContainer" containerID="944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.669621 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2n4r_openshift-multus(262620a9-ff94-42e0-a449-6c1a022f9b77)\"" pod="openshift-multus/multus-f2n4r" podUID="262620a9-ff94-42e0-a449-6c1a022f9b77"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.832384 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/3.log"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.834268 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovn-acl-logging/0.log"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.834726 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovn-controller/0.log"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.835093 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869016 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-s7j7d"]
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869176 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-acl-logging"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869193 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-acl-logging"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869202 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-node"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869209 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-node"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869216 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="sbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869221 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="sbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869230 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kubecfg-setup"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869235 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kubecfg-setup"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869243 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869249 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869257 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869262 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869268 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869274 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869280 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869285 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869292 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="northd"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869297 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="northd"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869308 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="nbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869313 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="nbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869321 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869326 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869405 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-node"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869413 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="sbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869420 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869428 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869434 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869439 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="northd"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869445 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="kube-rbac-proxy-ovn-metrics"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869453 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869460 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="nbdb"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869468 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovn-acl-logging"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869547 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869554 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller"
Dec 12 14:12:59 crc kubenswrapper[4663]: E1212 14:12:59.869561 4663
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller" Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869566 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller" Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869639 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller" Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.869646 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerName="ovnkube-controller" Dec 12 14:12:59 crc kubenswrapper[4663]: I1212 14:12:59.871017 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000288 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000337 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000354 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000371 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000392 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6skt\" (UniqueName: \"kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000384 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash" (OuterVolumeSpecName: "host-slash") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000408 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000430 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000458 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000471 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000484 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000498 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000520 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000549 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000566 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000580 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000599 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000612 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000631 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000650 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000678 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib\") pod \"89a20840-03d2-434f-a5a1-3e71288c3ec5\" (UID: \"89a20840-03d2-434f-a5a1-3e71288c3ec5\") " Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000815 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-config\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000835 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-slash\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000850 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-env-overrides\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000868 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-systemd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000884 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-ovn\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000425 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000898 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000789 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000817 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000834 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000850 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000874 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000932 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-node-log\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000980 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket" (OuterVolumeSpecName: "log-socket") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.000993 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001004 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-systemd-units\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001029 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001036 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log" (OuterVolumeSpecName: "node-log") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001049 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001071 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001043 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001085 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/983527e4-025b-45b2-8a56-4d4389b9aca4-ovn-node-metrics-cert\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001113 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-etc-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001063 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001160 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-kubelet\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001181 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-var-lib-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001226 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-log-socket\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001254 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001279 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-script-lib\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001309 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-bin\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001319 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001359 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-netd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001385 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-netns\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001407 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001407 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001428 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj92h\" (UniqueName: \"kubernetes.io/projected/983527e4-025b-45b2-8a56-4d4389b9aca4-kube-api-access-zj92h\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001484 4663 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001496 4663 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001507 4663 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001517 4663 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001529 4663 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001552 4663 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001562 4663 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-log-socket\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001571 4663 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001580 4663 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001589 4663 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001599 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001611 4663 reconciler_common.go:293] "Volume 
detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-slash\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001619 4663 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001627 4663 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001637 4663 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001646 4663 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-node-log\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.001655 4663 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.004915 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.005181 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt" (OuterVolumeSpecName: "kube-api-access-k6skt") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "kube-api-access-k6skt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.011524 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "89a20840-03d2-434f-a5a1-3e71288c3ec5" (UID: "89a20840-03d2-434f-a5a1-3e71288c3ec5"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.101970 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-kubelet\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102002 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-var-lib-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102022 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-log-socket\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102037 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102054 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-script-lib\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102071 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-bin\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102080 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-var-lib-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102089 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-netd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102123 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-netd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 
14:13:00.102119 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-kubelet\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102138 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-netns\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102150 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102159 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-log-socket\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102231 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-cni-bin\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102235 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-netns\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102284 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102304 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj92h\" (UniqueName: \"kubernetes.io/projected/983527e4-025b-45b2-8a56-4d4389b9aca4-kube-api-access-zj92h\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102318 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-config\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102332 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" 
(UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-slash\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102346 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-env-overrides\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102353 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-run-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102387 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-systemd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102363 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-systemd\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102430 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-ovn\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102447 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102466 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-node-log\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102494 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-systemd-units\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102515 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/983527e4-025b-45b2-8a56-4d4389b9aca4-ovn-node-metrics-cert\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102529 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-etc-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102584 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6skt\" (UniqueName: \"kubernetes.io/projected/89a20840-03d2-434f-a5a1-3e71288c3ec5-kube-api-access-k6skt\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102595 4663 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/89a20840-03d2-434f-a5a1-3e71288c3ec5-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102611 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-node-log\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102610 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-run-ovn\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102624 4663 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/89a20840-03d2-434f-a5a1-3e71288c3ec5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102649 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-systemd-units\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102649 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102679 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-host-slash\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102703 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/983527e4-025b-45b2-8a56-4d4389b9aca4-etc-openvswitch\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102734 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-script-lib\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.102886 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-ovnkube-config\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.103135 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/983527e4-025b-45b2-8a56-4d4389b9aca4-env-overrides\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.104865 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/983527e4-025b-45b2-8a56-4d4389b9aca4-ovn-node-metrics-cert\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.115257 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj92h\" (UniqueName: \"kubernetes.io/projected/983527e4-025b-45b2-8a56-4d4389b9aca4-kube-api-access-zj92h\") pod \"ovnkube-node-s7j7d\" (UID: \"983527e4-025b-45b2-8a56-4d4389b9aca4\") " pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.181399 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:00 crc kubenswrapper[4663]: W1212 14:13:00.193770 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod983527e4_025b_45b2_8a56_4d4389b9aca4.slice/crio-c81a4ea3891f87aea43906a9ffeecd91e2b4479c3b09031b19b4a07bdf0c569b WatchSource:0}: Error finding container c81a4ea3891f87aea43906a9ffeecd91e2b4479c3b09031b19b4a07bdf0c569b: Status 404 returned error can't find the container with id c81a4ea3891f87aea43906a9ffeecd91e2b4479c3b09031b19b4a07bdf0c569b Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.674149 4663 generic.go:334] "Generic (PLEG): container finished" podID="983527e4-025b-45b2-8a56-4d4389b9aca4" containerID="0b8859217f5cdeae51346ff0cc4e7dfc90b30dbd609a82d38e982ab27d146216" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.674240 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerDied","Data":"0b8859217f5cdeae51346ff0cc4e7dfc90b30dbd609a82d38e982ab27d146216"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.674373 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"c81a4ea3891f87aea43906a9ffeecd91e2b4479c3b09031b19b4a07bdf0c569b"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.675780 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/2.log" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.678108 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovnkube-controller/3.log" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.679891 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovn-acl-logging/0.log" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680248 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rz99s_89a20840-03d2-434f-a5a1-3e71288c3ec5/ovn-controller/0.log" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680531 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680567 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680575 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680577 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 
14:13:00.680604 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680584 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680624 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680626 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680641 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680652 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680661 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680670 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680679 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680689 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680689 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680694 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680760 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680772 4663 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680777 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680782 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680787 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680792 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680630 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" exitCode=0 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680809 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" exitCode=143 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680819 4663 generic.go:334] "Generic (PLEG): container finished" podID="89a20840-03d2-434f-a5a1-3e71288c3ec5" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" exitCode=143 Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680832 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680844 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680851 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680855 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680860 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680865 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680869 4663 pod_container_deletor.go:114] "Failed 
to issue the request to remove container" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680874 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680879 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680885 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680890 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680897 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680905 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680911 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680916 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680922 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680927 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680932 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680936 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680941 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680946 4663 pod_container_deletor.go:114] "Failed 
to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680950 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680956 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rz99s" event={"ID":"89a20840-03d2-434f-a5a1-3e71288c3ec5","Type":"ContainerDied","Data":"49d6ebc54ef9d8b27a907b311e2478143db3e78f72bcdcdcbf2c1a7b4da9dcc7"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680962 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680968 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680973 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680978 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680983 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680987 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680992 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.680996 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.681000 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.681005 4663 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.695351 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.715769 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rz99s"] Dec 12 
14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.718522 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rz99s"] Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.729041 4663 scope.go:117] "RemoveContainer" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.753328 4663 scope.go:117] "RemoveContainer" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.773467 4663 scope.go:117] "RemoveContainer" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.782920 4663 scope.go:117] "RemoveContainer" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.792410 4663 scope.go:117] "RemoveContainer" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.803394 4663 scope.go:117] "RemoveContainer" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.813824 4663 scope.go:117] "RemoveContainer" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.835690 4663 scope.go:117] "RemoveContainer" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.849756 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.850006 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850038 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} err="failed to get container status \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850057 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.850248 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": container with ID starting with 551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072 not found: ID does not exist" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850268 4663 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} err="failed to get container status \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": rpc error: code = NotFound desc = could not find container \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": container with ID starting with 551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850283 4663 scope.go:117] "RemoveContainer" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.850452 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": container with ID starting with d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030 not found: ID does not exist" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850471 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} err="failed to get container status \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": rpc error: code = NotFound desc = could not find container \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": container with ID starting with d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850483 4663 scope.go:117] "RemoveContainer" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.850650 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": container with ID starting with ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486 not found: ID does not exist" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850670 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} err="failed to get container status \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": rpc error: code = NotFound desc = could not find container \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": container with ID starting with ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850682 4663 scope.go:117] "RemoveContainer" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.850901 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": container with ID starting with 3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4 not found: ID does 
not exist" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850920 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} err="failed to get container status \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": rpc error: code = NotFound desc = could not find container \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": container with ID starting with 3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.850931 4663 scope.go:117] "RemoveContainer" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.851075 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": container with ID starting with 64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218 not found: ID does not exist" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851095 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} err="failed to get container status \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": rpc error: code = NotFound desc = could not find container \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": container with ID starting with 64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851108 4663 scope.go:117] "RemoveContainer" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.851244 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": container with ID starting with d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f not found: ID does not exist" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851261 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} err="failed to get container status \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": rpc error: code = NotFound desc = could not find container \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": container with ID starting with d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851272 4663 scope.go:117] "RemoveContainer" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.851408 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": container with ID starting with 4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d not found: ID does not exist" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851427 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} err="failed to get container status \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": rpc error: code = NotFound desc = could not find container \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": container with ID starting with 4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851438 4663 scope.go:117] "RemoveContainer" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.851593 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": container with ID starting with a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc not found: ID does not exist" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851611 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} err="failed to get container status \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": rpc error: code = NotFound desc = could not find container \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": container with ID starting with a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851623 4663 scope.go:117] "RemoveContainer" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: E1212 14:13:00.851791 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": container with ID starting with c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2 not found: ID does not exist" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851810 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} err="failed to get container status \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": rpc error: code = NotFound desc = could not find container \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": container with ID starting with c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851821 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc 
kubenswrapper[4663]: I1212 14:13:00.851962 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} err="failed to get container status \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.851979 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852165 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} err="failed to get container status \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": rpc error: code = NotFound desc = could not find container \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": container with ID starting with 551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852182 4663 scope.go:117] "RemoveContainer" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852318 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} err="failed to get container status \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": rpc error: code = NotFound desc = could not find container \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": container with ID starting with d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852334 4663 scope.go:117] "RemoveContainer" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852474 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} err="failed to get container status \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": rpc error: code = NotFound desc = could not find container \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": container with ID starting with ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852490 4663 scope.go:117] "RemoveContainer" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852651 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} err="failed to get container status \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": rpc error: code = NotFound desc = could not find container \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": container with ID 
starting with 3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852669 4663 scope.go:117] "RemoveContainer" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852829 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} err="failed to get container status \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": rpc error: code = NotFound desc = could not find container \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": container with ID starting with 64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852847 4663 scope.go:117] "RemoveContainer" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852983 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} err="failed to get container status \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": rpc error: code = NotFound desc = could not find container \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": container with ID starting with d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.852999 4663 scope.go:117] "RemoveContainer" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853140 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} err="failed to get container status \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": rpc error: code = NotFound desc = could not find container \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": container with ID starting with 4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853157 4663 scope.go:117] "RemoveContainer" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853288 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} err="failed to get container status \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": rpc error: code = NotFound desc = could not find container \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": container with ID starting with a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853305 4663 scope.go:117] "RemoveContainer" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853450 4663 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} err="failed to get container status \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": rpc error: code = NotFound desc = could not find container \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": container with ID starting with c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853466 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853634 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} err="failed to get container status \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853651 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853811 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} err="failed to get container status \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": rpc error: code = NotFound desc = could not find container \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": container with ID starting with 551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853827 4663 scope.go:117] "RemoveContainer" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853964 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} err="failed to get container status \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": rpc error: code = NotFound desc = could not find container \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": container with ID starting with d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.853979 4663 scope.go:117] "RemoveContainer" containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854107 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} err="failed to get container status \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": rpc error: code = NotFound desc = could not find container \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": container with ID starting with ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486 not found: ID does not exist" Dec 
12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854124 4663 scope.go:117] "RemoveContainer" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854342 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} err="failed to get container status \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": rpc error: code = NotFound desc = could not find container \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": container with ID starting with 3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854355 4663 scope.go:117] "RemoveContainer" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854495 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} err="failed to get container status \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": rpc error: code = NotFound desc = could not find container \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": container with ID starting with 64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854512 4663 scope.go:117] "RemoveContainer" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854666 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} err="failed to get container status \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": rpc error: code = NotFound desc = could not find container \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": container with ID starting with d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854682 4663 scope.go:117] "RemoveContainer" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854847 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} err="failed to get container status \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": rpc error: code = NotFound desc = could not find container \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": container with ID starting with 4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.854863 4663 scope.go:117] "RemoveContainer" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855015 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} err="failed to get container status 
\"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": rpc error: code = NotFound desc = could not find container \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": container with ID starting with a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855032 4663 scope.go:117] "RemoveContainer" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855162 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} err="failed to get container status \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": rpc error: code = NotFound desc = could not find container \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": container with ID starting with c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855179 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855311 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} err="failed to get container status \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855326 4663 scope.go:117] "RemoveContainer" containerID="551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855453 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072"} err="failed to get container status \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": rpc error: code = NotFound desc = could not find container \"551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072\": container with ID starting with 551e426c21747a30cc146bb478bd389ccdafe5ab8a31bd2d77973737d6cdc072 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855469 4663 scope.go:117] "RemoveContainer" containerID="d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855619 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030"} err="failed to get container status \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": rpc error: code = NotFound desc = could not find container \"d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030\": container with ID starting with d74a5e5d21ce01661d81ce897d5a6f0f2a9b035eaa9ddb61c252bd53bad66030 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855635 4663 scope.go:117] "RemoveContainer" 
containerID="ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855973 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486"} err="failed to get container status \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": rpc error: code = NotFound desc = could not find container \"ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486\": container with ID starting with ea7b3cf360613d65924ded0d841a4774f3ae7bd21a4e98074388548a33b65486 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.855994 4663 scope.go:117] "RemoveContainer" containerID="3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856171 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4"} err="failed to get container status \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": rpc error: code = NotFound desc = could not find container \"3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4\": container with ID starting with 3617e4b87187b756afee9f50739a5c1e41337f18f6c15d188f2cd8f5f10573e4 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856195 4663 scope.go:117] "RemoveContainer" containerID="64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856369 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218"} err="failed to get container status \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": rpc error: code = NotFound desc = could not find container \"64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218\": container with ID starting with 64dcdc25af075ee8b653f7b0ff00aab43b01c19e3f45cedac3598b76f59b8218 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856388 4663 scope.go:117] "RemoveContainer" containerID="d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856555 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f"} err="failed to get container status \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": rpc error: code = NotFound desc = could not find container \"d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f\": container with ID starting with d8b0c081713b70858ac4b82df30741afee7bab12e61da63587ae37b512c7d61f not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856573 4663 scope.go:117] "RemoveContainer" containerID="4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856713 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d"} err="failed to get container status \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": rpc error: code = NotFound desc = could not find 
container \"4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d\": container with ID starting with 4c4a22242fbe359ae91fbf20e870cb8c6f43bd9d250b5667ab5c44da176c144d not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856732 4663 scope.go:117] "RemoveContainer" containerID="a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856892 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc"} err="failed to get container status \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": rpc error: code = NotFound desc = could not find container \"a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc\": container with ID starting with a4d1dd111788cadf20a4b4e9d25313728e2d54302c71068e547696f42993c9bc not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.856910 4663 scope.go:117] "RemoveContainer" containerID="c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.857456 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2"} err="failed to get container status \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": rpc error: code = NotFound desc = could not find container \"c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2\": container with ID starting with c8d83578195e463af191bd852efab08e78d8a7652bc9b842ec1046951be3cfd2 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.857474 4663 scope.go:117] "RemoveContainer" containerID="6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.857850 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53"} err="failed to get container status \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": rpc error: code = NotFound desc = could not find container \"6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53\": container with ID starting with 6d93fd39ee651face632746a8c8a97b8acd6b5b76b5cf1addcd64c2563befd53 not found: ID does not exist" Dec 12 14:13:00 crc kubenswrapper[4663]: I1212 14:13:00.875765 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89a20840-03d2-434f-a5a1-3e71288c3ec5" path="/var/lib/kubelet/pods/89a20840-03d2-434f-a5a1-3e71288c3ec5/volumes" Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688893 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"71d5178e7f8f8e904ea81893bf3c8e621ab547a84e543d00149bb7c69fcde65f"} Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688932 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"159565d71d37b40f6c52041fea6c3e5fd63e96d69b8b91b95925f9a7bd9fa2f9"} Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688956 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"b257d251dd9ddf8efa780172a2288d290c58dfbed651b6dee8c7a2efd45af599"} Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688967 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"8a249ba5909d686ec26873a4ea1d0da8b80c04cf8c1587fd9fd864e5bf6db131"} Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688974 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"ee919f07ab07ece45901601c091e74bfe72509ede8503114e83b7ef089179024"} Dec 12 14:13:01 crc kubenswrapper[4663]: I1212 14:13:01.688988 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"81537a196014c16490e300e1033c584da7095864b0b108a284a9403f6a49a1fd"} Dec 12 14:13:03 crc kubenswrapper[4663]: I1212 14:13:03.699233 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"345cfda7d96b017a9de0737e37f675fb896f49258934d759936abee584785add"} Dec 12 14:13:05 crc kubenswrapper[4663]: I1212 14:13:05.710147 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" event={"ID":"983527e4-025b-45b2-8a56-4d4389b9aca4","Type":"ContainerStarted","Data":"aedf5610e17b935bf1a9dcb79fd3dc585b56438446fe03b71c04033351736af4"} Dec 12 14:13:05 crc kubenswrapper[4663]: I1212 14:13:05.710502 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:05 crc kubenswrapper[4663]: I1212 14:13:05.710524 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:05 crc kubenswrapper[4663]: I1212 14:13:05.740764 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" podStartSLOduration=6.740737621 podStartE2EDuration="6.740737621s" podCreationTimestamp="2025-12-12 14:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:13:05.734486993 +0000 UTC m=+565.159905048" watchObservedRunningTime="2025-12-12 14:13:05.740737621 +0000 UTC m=+565.166155675" Dec 12 14:13:05 crc kubenswrapper[4663]: I1212 14:13:05.744551 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.129145 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.129200 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.129239 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.129684 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.129732 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9" gracePeriod=600 Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.715724 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9" exitCode=0 Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.715795 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9"} Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.715949 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b"} Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.715969 4663 scope.go:117] "RemoveContainer" containerID="1e9ca5f2aeb91abe2527bcfb8a9ce3821e586c8032624f0a0df36fe70c76b8ae" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.716270 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:06 crc kubenswrapper[4663]: I1212 14:13:06.736302 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:10 crc kubenswrapper[4663]: I1212 14:13:10.871472 4663 scope.go:117] "RemoveContainer" containerID="944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53" Dec 12 14:13:10 crc kubenswrapper[4663]: E1212 14:13:10.871934 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-f2n4r_openshift-multus(262620a9-ff94-42e0-a449-6c1a022f9b77)\"" pod="openshift-multus/multus-f2n4r" podUID="262620a9-ff94-42e0-a449-6c1a022f9b77" Dec 12 14:13:23 crc kubenswrapper[4663]: I1212 14:13:23.869707 4663 scope.go:117] "RemoveContainer" containerID="944368ce3445b677c275a699db9996a6dd2fe3dd988f20e63deb301c87d49c53" Dec 12 14:13:24 crc kubenswrapper[4663]: I1212 14:13:24.779453 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-f2n4r_262620a9-ff94-42e0-a449-6c1a022f9b77/kube-multus/2.log" Dec 12 14:13:24 crc kubenswrapper[4663]: I1212 14:13:24.779703 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-f2n4r" event={"ID":"262620a9-ff94-42e0-a449-6c1a022f9b77","Type":"ContainerStarted","Data":"004513ba2bcd00b6def4b42f2a467aa52fcc1127a81bf6eb99286c9cf4d34e5c"} Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.940175 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst"] Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.941159 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.945372 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.948011 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst"] Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.971284 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.971340 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:27 crc kubenswrapper[4663]: I1212 14:13:27.971413 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfksp\" (UniqueName: \"kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.072551 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.072595 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " 
pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.072647 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfksp\" (UniqueName: \"kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.073029 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.073090 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.087373 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfksp\" (UniqueName: \"kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.252389 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.583528 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst"] Dec 12 14:13:28 crc kubenswrapper[4663]: W1212 14:13:28.587317 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ff84d75_5a7c_4a71_8e6b_fcc02d38b392.slice/crio-41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780 WatchSource:0}: Error finding container 41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780: Status 404 returned error can't find the container with id 41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780 Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.794300 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerStarted","Data":"32c6bb3cd391d8f651482e9a23d58fb9b3a3bb97070bcdb24c19fdfffe033b1f"} Dec 12 14:13:28 crc kubenswrapper[4663]: I1212 14:13:28.794337 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerStarted","Data":"41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780"} Dec 12 14:13:29 crc kubenswrapper[4663]: I1212 14:13:29.799821 4663 generic.go:334] "Generic (PLEG): container finished" podID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerID="32c6bb3cd391d8f651482e9a23d58fb9b3a3bb97070bcdb24c19fdfffe033b1f" exitCode=0 Dec 12 14:13:29 crc kubenswrapper[4663]: I1212 14:13:29.799888 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerDied","Data":"32c6bb3cd391d8f651482e9a23d58fb9b3a3bb97070bcdb24c19fdfffe033b1f"} Dec 12 14:13:30 crc kubenswrapper[4663]: I1212 14:13:30.196493 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-s7j7d" Dec 12 14:13:31 crc kubenswrapper[4663]: I1212 14:13:31.810363 4663 generic.go:334] "Generic (PLEG): container finished" podID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerID="f9ee6d6c227144f2b0dd0acf37d0dff9fee5799cddc9a00034f9bc87653cbb60" exitCode=0 Dec 12 14:13:31 crc kubenswrapper[4663]: I1212 14:13:31.810445 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerDied","Data":"f9ee6d6c227144f2b0dd0acf37d0dff9fee5799cddc9a00034f9bc87653cbb60"} Dec 12 14:13:32 crc kubenswrapper[4663]: I1212 14:13:32.815319 4663 generic.go:334] "Generic (PLEG): container finished" podID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerID="c0cd1a585ce0ca965bbe4bb3f49c3f830d9adfcd7ccbaa03c5383f36444e6bae" exitCode=0 Dec 12 14:13:32 crc kubenswrapper[4663]: I1212 14:13:32.815355 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" 
event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerDied","Data":"c0cd1a585ce0ca965bbe4bb3f49c3f830d9adfcd7ccbaa03c5383f36444e6bae"} Dec 12 14:13:33 crc kubenswrapper[4663]: I1212 14:13:33.977805 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.125053 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfksp\" (UniqueName: \"kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp\") pod \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.125099 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util\") pod \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.125136 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle\") pod \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\" (UID: \"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392\") " Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.125630 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle" (OuterVolumeSpecName: "bundle") pod "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" (UID: "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.128734 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp" (OuterVolumeSpecName: "kube-api-access-vfksp") pod "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" (UID: "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392"). InnerVolumeSpecName "kube-api-access-vfksp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.132252 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util" (OuterVolumeSpecName: "util") pod "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" (UID: "2ff84d75-5a7c-4a71-8e6b-fcc02d38b392"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.225664 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfksp\" (UniqueName: \"kubernetes.io/projected/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-kube-api-access-vfksp\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.225690 4663 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-util\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.225701 4663 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2ff84d75-5a7c-4a71-8e6b-fcc02d38b392-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.823619 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" event={"ID":"2ff84d75-5a7c-4a71-8e6b-fcc02d38b392","Type":"ContainerDied","Data":"41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780"} Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.823653 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41e05b4ce7fc87d22cfc83a942fa14c2fc6fbdb1fb2fff04c8c8d48fd9c1c780" Dec 12 14:13:34 crc kubenswrapper[4663]: I1212 14:13:34.823664 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.190689 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rwbmc"] Dec 12 14:13:36 crc kubenswrapper[4663]: E1212 14:13:36.191057 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="util" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.191068 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="util" Dec 12 14:13:36 crc kubenswrapper[4663]: E1212 14:13:36.191079 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="extract" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.191084 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="extract" Dec 12 14:13:36 crc kubenswrapper[4663]: E1212 14:13:36.191092 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="pull" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.191097 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="pull" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.191176 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ff84d75-5a7c-4a71-8e6b-fcc02d38b392" containerName="extract" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.191494 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.193032 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.193175 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-4mrgf" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.193245 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.197864 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rwbmc"] Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.242926 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2jg2\" (UniqueName: \"kubernetes.io/projected/890e13cb-bf05-4143-9dba-9979bbfbe8e8-kube-api-access-f2jg2\") pod \"nmstate-operator-6769fb99d-rwbmc\" (UID: \"890e13cb-bf05-4143-9dba-9979bbfbe8e8\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.343856 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2jg2\" (UniqueName: \"kubernetes.io/projected/890e13cb-bf05-4143-9dba-9979bbfbe8e8-kube-api-access-f2jg2\") pod \"nmstate-operator-6769fb99d-rwbmc\" (UID: \"890e13cb-bf05-4143-9dba-9979bbfbe8e8\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.357903 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2jg2\" (UniqueName: \"kubernetes.io/projected/890e13cb-bf05-4143-9dba-9979bbfbe8e8-kube-api-access-f2jg2\") pod \"nmstate-operator-6769fb99d-rwbmc\" (UID: \"890e13cb-bf05-4143-9dba-9979bbfbe8e8\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.502090 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" Dec 12 14:13:36 crc kubenswrapper[4663]: I1212 14:13:36.830428 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-rwbmc"] Dec 12 14:13:36 crc kubenswrapper[4663]: W1212 14:13:36.832470 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod890e13cb_bf05_4143_9dba_9979bbfbe8e8.slice/crio-f385707b962b49bcb6a731ef7549d29e9f2b17e3817e3aba5f98833316b0780e WatchSource:0}: Error finding container f385707b962b49bcb6a731ef7549d29e9f2b17e3817e3aba5f98833316b0780e: Status 404 returned error can't find the container with id f385707b962b49bcb6a731ef7549d29e9f2b17e3817e3aba5f98833316b0780e Dec 12 14:13:37 crc kubenswrapper[4663]: I1212 14:13:37.834405 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" event={"ID":"890e13cb-bf05-4143-9dba-9979bbfbe8e8","Type":"ContainerStarted","Data":"f385707b962b49bcb6a731ef7549d29e9f2b17e3817e3aba5f98833316b0780e"} Dec 12 14:13:39 crc kubenswrapper[4663]: I1212 14:13:39.844998 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" event={"ID":"890e13cb-bf05-4143-9dba-9979bbfbe8e8","Type":"ContainerStarted","Data":"ddc7b2d89a445f0760dedd41e4e41f115054fc37fc8a0144d5d28d1c48b72d57"} Dec 12 14:13:39 crc kubenswrapper[4663]: I1212 14:13:39.856297 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-rwbmc" podStartSLOduration=1.5343753580000001 podStartE2EDuration="3.856283826s" podCreationTimestamp="2025-12-12 14:13:36 +0000 UTC" firstStartedPulling="2025-12-12 14:13:36.83414153 +0000 UTC m=+596.259559584" lastFinishedPulling="2025-12-12 14:13:39.156049999 +0000 UTC m=+598.581468052" observedRunningTime="2025-12-12 14:13:39.854713363 +0000 UTC m=+599.280131427" watchObservedRunningTime="2025-12-12 14:13:39.856283826 +0000 UTC m=+599.281701880" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.613599 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.614276 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.616455 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-78jr2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.625635 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.626170 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.627277 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.633206 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.635424 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-pg97r"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.636046 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.637638 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.713532 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.714091 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.715315 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-pqqrn" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.715328 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.715908 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.720716 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788514 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxjcq\" (UniqueName: \"kubernetes.io/projected/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-kube-api-access-mxjcq\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788560 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-nmstate-lock\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788603 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqvd8\" (UniqueName: \"kubernetes.io/projected/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-kube-api-access-cqvd8\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788641 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: 
\"kubernetes.io/secret/19e4fc6a-b15f-4c91-9caa-08e836723adc-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788666 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-ovs-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788688 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlzzq\" (UniqueName: \"kubernetes.io/projected/e0838bcf-5ff7-49cc-a108-ac6c9f98b92c-kube-api-access-jlzzq\") pod \"nmstate-metrics-7f7f7578db-v5c4t\" (UID: \"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788805 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-dbus-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788870 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh2nj\" (UniqueName: \"kubernetes.io/projected/19e4fc6a-b15f-4c91-9caa-08e836723adc-kube-api-access-sh2nj\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788925 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.788967 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892341 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqvd8\" (UniqueName: \"kubernetes.io/projected/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-kube-api-access-cqvd8\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892381 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/19e4fc6a-b15f-4c91-9caa-08e836723adc-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" 
Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892400 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-ovs-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892417 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlzzq\" (UniqueName: \"kubernetes.io/projected/e0838bcf-5ff7-49cc-a108-ac6c9f98b92c-kube-api-access-jlzzq\") pod \"nmstate-metrics-7f7f7578db-v5c4t\" (UID: \"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892438 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-dbus-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892468 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh2nj\" (UniqueName: \"kubernetes.io/projected/19e4fc6a-b15f-4c91-9caa-08e836723adc-kube-api-access-sh2nj\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892497 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892515 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892756 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxjcq\" (UniqueName: \"kubernetes.io/projected/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-kube-api-access-mxjcq\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892780 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-nmstate-lock\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.892849 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-nmstate-lock\") pod \"nmstate-handler-pg97r\" (UID: 
\"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.893460 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-ovs-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.893815 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6fc575d5d9-5s4f2"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.893847 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-dbus-socket\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.894666 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.896287 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fc575d5d9-5s4f2"] Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.897778 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.898010 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.898156 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.904499 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.909794 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqvd8\" (UniqueName: \"kubernetes.io/projected/3e5fad3b-f0aa-4dd2-a485-d44a365605f1-kube-api-access-cqvd8\") pod \"nmstate-handler-pg97r\" (UID: \"3e5fad3b-f0aa-4dd2-a485-d44a365605f1\") " pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.912253 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlzzq\" (UniqueName: \"kubernetes.io/projected/e0838bcf-5ff7-49cc-a108-ac6c9f98b92c-kube-api-access-jlzzq\") pod \"nmstate-metrics-7f7f7578db-v5c4t\" (UID: \"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.913020 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh2nj\" (UniqueName: \"kubernetes.io/projected/19e4fc6a-b15f-4c91-9caa-08e836723adc-kube-api-access-sh2nj\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.913878 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.916241 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxjcq\" (UniqueName: \"kubernetes.io/projected/b62c8bbe-eb6b-4f0a-ade2-813a6a149c36-kube-api-access-mxjcq\") pod \"nmstate-console-plugin-6ff7998486-wp687\" (UID: \"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.919352 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/19e4fc6a-b15f-4c91-9caa-08e836723adc-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-dw2nz\" (UID: \"19e4fc6a-b15f-4c91-9caa-08e836723adc\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.928028 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-78jr2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.935020 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.937070 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.945061 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:40 crc kubenswrapper[4663]: W1212 14:13:40.968269 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e5fad3b_f0aa_4dd2_a485_d44a365605f1.slice/crio-046b504866fd21baccc717b18051cc90ca168413b6244dc5422b385b25df375f WatchSource:0}: Error finding container 046b504866fd21baccc717b18051cc90ca168413b6244dc5422b385b25df375f: Status 404 returned error can't find the container with id 046b504866fd21baccc717b18051cc90ca168413b6244dc5422b385b25df375f Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993325 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb4qx\" (UniqueName: \"kubernetes.io/projected/0b325b21-dfa8-4f7d-8896-2901013aec91-kube-api-access-wb4qx\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993359 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-oauth-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993393 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-console-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993431 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-service-ca\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993460 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-oauth-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993495 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:40 crc kubenswrapper[4663]: I1212 14:13:40.993540 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-trusted-ca-bundle\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 
14:13:41.027478 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-pqqrn" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.035107 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094856 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-console-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094900 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-service-ca\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094930 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-oauth-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094951 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094968 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-trusted-ca-bundle\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.094984 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb4qx\" (UniqueName: \"kubernetes.io/projected/0b325b21-dfa8-4f7d-8896-2901013aec91-kube-api-access-wb4qx\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.095003 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-oauth-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.096532 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-service-ca\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.097204 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-trusted-ca-bundle\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.097287 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-oauth-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.098571 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0b325b21-dfa8-4f7d-8896-2901013aec91-console-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.100719 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-serving-cert\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.100927 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0b325b21-dfa8-4f7d-8896-2901013aec91-console-oauth-config\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.110500 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb4qx\" (UniqueName: \"kubernetes.io/projected/0b325b21-dfa8-4f7d-8896-2901013aec91-kube-api-access-wb4qx\") pod \"console-6fc575d5d9-5s4f2\" (UID: \"0b325b21-dfa8-4f7d-8896-2901013aec91\") " pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.239933 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.280787 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t"] Dec 12 14:13:41 crc kubenswrapper[4663]: W1212 14:13:41.286181 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0838bcf_5ff7_49cc_a108_ac6c9f98b92c.slice/crio-563489c97e89958699dc27963d03923c667139f7157943eca17ff14ec4876397 WatchSource:0}: Error finding container 563489c97e89958699dc27963d03923c667139f7157943eca17ff14ec4876397: Status 404 returned error can't find the container with id 563489c97e89958699dc27963d03923c667139f7157943eca17ff14ec4876397 Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.308924 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz"] Dec 12 14:13:41 crc kubenswrapper[4663]: W1212 14:13:41.315219 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19e4fc6a_b15f_4c91_9caa_08e836723adc.slice/crio-83cb2e04b5152590c670341dbfd9a9bd54b0279b16ca065d6acd2a58f4d292ca WatchSource:0}: Error finding container 83cb2e04b5152590c670341dbfd9a9bd54b0279b16ca065d6acd2a58f4d292ca: Status 404 returned error can't find the container with id 83cb2e04b5152590c670341dbfd9a9bd54b0279b16ca065d6acd2a58f4d292ca Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.363971 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687"] Dec 12 14:13:41 crc kubenswrapper[4663]: W1212 14:13:41.366441 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb62c8bbe_eb6b_4f0a_ade2_813a6a149c36.slice/crio-fe12535a60fbd1bfe52cbc11fed65e58de42602ab7c140c4db57de75f98e8b87 WatchSource:0}: Error finding container fe12535a60fbd1bfe52cbc11fed65e58de42602ab7c140c4db57de75f98e8b87: Status 404 returned error can't find the container with id fe12535a60fbd1bfe52cbc11fed65e58de42602ab7c140c4db57de75f98e8b87 Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.573187 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fc575d5d9-5s4f2"] Dec 12 14:13:41 crc kubenswrapper[4663]: W1212 14:13:41.576299 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b325b21_dfa8_4f7d_8896_2901013aec91.slice/crio-c49ea0671b6ba8eff47ba5b3e612b7470beb4447ea4b0e7aa4cf988bb9796ff8 WatchSource:0}: Error finding container c49ea0671b6ba8eff47ba5b3e612b7470beb4447ea4b0e7aa4cf988bb9796ff8: Status 404 returned error can't find the container with id c49ea0671b6ba8eff47ba5b3e612b7470beb4447ea4b0e7aa4cf988bb9796ff8 Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.857830 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" event={"ID":"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c","Type":"ContainerStarted","Data":"563489c97e89958699dc27963d03923c667139f7157943eca17ff14ec4876397"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.858537 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" 
event={"ID":"19e4fc6a-b15f-4c91-9caa-08e836723adc","Type":"ContainerStarted","Data":"83cb2e04b5152590c670341dbfd9a9bd54b0279b16ca065d6acd2a58f4d292ca"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.859971 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fc575d5d9-5s4f2" event={"ID":"0b325b21-dfa8-4f7d-8896-2901013aec91","Type":"ContainerStarted","Data":"0641bb462be5855596f018c68a83deac21861b7e6b896065f4769ce3debf784f"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.860027 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fc575d5d9-5s4f2" event={"ID":"0b325b21-dfa8-4f7d-8896-2901013aec91","Type":"ContainerStarted","Data":"c49ea0671b6ba8eff47ba5b3e612b7470beb4447ea4b0e7aa4cf988bb9796ff8"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.860914 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pg97r" event={"ID":"3e5fad3b-f0aa-4dd2-a485-d44a365605f1","Type":"ContainerStarted","Data":"046b504866fd21baccc717b18051cc90ca168413b6244dc5422b385b25df375f"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.861610 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" event={"ID":"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36","Type":"ContainerStarted","Data":"fe12535a60fbd1bfe52cbc11fed65e58de42602ab7c140c4db57de75f98e8b87"} Dec 12 14:13:41 crc kubenswrapper[4663]: I1212 14:13:41.873712 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6fc575d5d9-5s4f2" podStartSLOduration=1.873700896 podStartE2EDuration="1.873700896s" podCreationTimestamp="2025-12-12 14:13:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:13:41.870837751 +0000 UTC m=+601.296255805" watchObservedRunningTime="2025-12-12 14:13:41.873700896 +0000 UTC m=+601.299118951" Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.870764 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" event={"ID":"19e4fc6a-b15f-4c91-9caa-08e836723adc","Type":"ContainerStarted","Data":"5aee7798f28fc80d02b48978b7b9e695615651ac92e4b16ba718e0f1d15c8753"} Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.871859 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.873794 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pg97r" event={"ID":"3e5fad3b-f0aa-4dd2-a485-d44a365605f1","Type":"ContainerStarted","Data":"ce9cd08ba27918896cb68859afa57ca60c5357fa3ae50e0b368a668cd496d913"} Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.873878 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.875543 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" event={"ID":"b62c8bbe-eb6b-4f0a-ade2-813a6a149c36","Type":"ContainerStarted","Data":"da16a7c6e1871b2046fa26d1b5dca6b0222cfe9e363b6d39f943efca7219c7d9"} Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.876666 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" 
event={"ID":"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c","Type":"ContainerStarted","Data":"3ea9cfe0197503ef9b6cae66c5de56c25d696bfdfebd2f2749b8fa02e52bcafc"} Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.883767 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" podStartSLOduration=1.731441868 podStartE2EDuration="3.8837575s" podCreationTimestamp="2025-12-12 14:13:40 +0000 UTC" firstStartedPulling="2025-12-12 14:13:41.320255406 +0000 UTC m=+600.745673460" lastFinishedPulling="2025-12-12 14:13:43.472571038 +0000 UTC m=+602.897989092" observedRunningTime="2025-12-12 14:13:43.88090235 +0000 UTC m=+603.306320404" watchObservedRunningTime="2025-12-12 14:13:43.8837575 +0000 UTC m=+603.309175554" Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.896473 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-wp687" podStartSLOduration=1.797058778 podStartE2EDuration="3.896466789s" podCreationTimestamp="2025-12-12 14:13:40 +0000 UTC" firstStartedPulling="2025-12-12 14:13:41.368626262 +0000 UTC m=+600.794044316" lastFinishedPulling="2025-12-12 14:13:43.468034272 +0000 UTC m=+602.893452327" observedRunningTime="2025-12-12 14:13:43.892042035 +0000 UTC m=+603.317460090" watchObservedRunningTime="2025-12-12 14:13:43.896466789 +0000 UTC m=+603.321884844" Dec 12 14:13:43 crc kubenswrapper[4663]: I1212 14:13:43.910289 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-pg97r" podStartSLOduration=1.403530703 podStartE2EDuration="3.910275025s" podCreationTimestamp="2025-12-12 14:13:40 +0000 UTC" firstStartedPulling="2025-12-12 14:13:40.970180285 +0000 UTC m=+600.395598340" lastFinishedPulling="2025-12-12 14:13:43.476924607 +0000 UTC m=+602.902342662" observedRunningTime="2025-12-12 14:13:43.905982641 +0000 UTC m=+603.331400694" watchObservedRunningTime="2025-12-12 14:13:43.910275025 +0000 UTC m=+603.335693079" Dec 12 14:13:45 crc kubenswrapper[4663]: I1212 14:13:45.885160 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" event={"ID":"e0838bcf-5ff7-49cc-a108-ac6c9f98b92c","Type":"ContainerStarted","Data":"87f85837b73ed649a6902900b725513aa52b835ecfb3e4bb084092e6a0bd9508"} Dec 12 14:13:45 crc kubenswrapper[4663]: I1212 14:13:45.901125 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-v5c4t" podStartSLOduration=1.479471052 podStartE2EDuration="5.901111131s" podCreationTimestamp="2025-12-12 14:13:40 +0000 UTC" firstStartedPulling="2025-12-12 14:13:41.288051054 +0000 UTC m=+600.713469108" lastFinishedPulling="2025-12-12 14:13:45.709691133 +0000 UTC m=+605.135109187" observedRunningTime="2025-12-12 14:13:45.897179385 +0000 UTC m=+605.322597439" watchObservedRunningTime="2025-12-12 14:13:45.901111131 +0000 UTC m=+605.326529186" Dec 12 14:13:50 crc kubenswrapper[4663]: I1212 14:13:50.959855 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-pg97r" Dec 12 14:13:51 crc kubenswrapper[4663]: I1212 14:13:51.240265 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:51 crc kubenswrapper[4663]: I1212 14:13:51.240309 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:51 crc kubenswrapper[4663]: I1212 14:13:51.243306 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:51 crc kubenswrapper[4663]: I1212 14:13:51.909522 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6fc575d5d9-5s4f2" Dec 12 14:13:51 crc kubenswrapper[4663]: I1212 14:13:51.940874 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"] Dec 12 14:14:00 crc kubenswrapper[4663]: I1212 14:14:00.939937 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-dw2nz" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.282739 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j"] Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.284124 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.287411 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8ckr\" (UniqueName: \"kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.287466 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.287491 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.288668 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.290730 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j"] Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.388195 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8ckr\" (UniqueName: \"kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 
14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.388244 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.388271 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.388721 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.388765 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.403409 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8ckr\" (UniqueName: \"kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.597665 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.934039 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j"] Dec 12 14:14:11 crc kubenswrapper[4663]: I1212 14:14:11.992380 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" event={"ID":"19b5b820-9d1f-4a41-ac3a-b6609a4be881","Type":"ContainerStarted","Data":"ec2d93ca45581887d477377aec92672ed19f5752347b98edba9bb3fd84d78d8e"} Dec 12 14:14:12 crc kubenswrapper[4663]: I1212 14:14:12.998927 4663 generic.go:334] "Generic (PLEG): container finished" podID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerID="c75d0b8ec1b38a28e13b36e35e5befa73a51816de2b3b2dc79cbbcad34d2fe24" exitCode=0 Dec 12 14:14:12 crc kubenswrapper[4663]: I1212 14:14:12.999104 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" event={"ID":"19b5b820-9d1f-4a41-ac3a-b6609a4be881","Type":"ContainerDied","Data":"c75d0b8ec1b38a28e13b36e35e5befa73a51816de2b3b2dc79cbbcad34d2fe24"} Dec 12 14:14:15 crc kubenswrapper[4663]: I1212 14:14:15.007708 4663 generic.go:334] "Generic (PLEG): container finished" podID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerID="4f74d8aa48f63040c1091b7ba38c9520149feba18f7f719d408342f2a936b9d7" exitCode=0 Dec 12 14:14:15 crc kubenswrapper[4663]: I1212 14:14:15.007764 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" event={"ID":"19b5b820-9d1f-4a41-ac3a-b6609a4be881","Type":"ContainerDied","Data":"4f74d8aa48f63040c1091b7ba38c9520149feba18f7f719d408342f2a936b9d7"} Dec 12 14:14:16 crc kubenswrapper[4663]: I1212 14:14:16.013665 4663 generic.go:334] "Generic (PLEG): container finished" podID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerID="69bd8949cf8710d60f3c8d2319e80368a2321096b76d1a222a5fb1de01348a94" exitCode=0 Dec 12 14:14:16 crc kubenswrapper[4663]: I1212 14:14:16.013767 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" event={"ID":"19b5b820-9d1f-4a41-ac3a-b6609a4be881","Type":"ContainerDied","Data":"69bd8949cf8710d60f3c8d2319e80368a2321096b76d1a222a5fb1de01348a94"} Dec 12 14:14:16 crc kubenswrapper[4663]: I1212 14:14:16.964011 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-f4vfm" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerName="console" containerID="cri-o://6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b" gracePeriod=15 Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.178162 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.257992 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f4vfm_6ac5aa32-7726-427e-99ee-19b74838d5b6/console/0.log" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.258175 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.348724 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle\") pod \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.348837 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8ckr\" (UniqueName: \"kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr\") pod \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.348862 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util\") pod \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\" (UID: \"19b5b820-9d1f-4a41-ac3a-b6609a4be881\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.349711 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle" (OuterVolumeSpecName: "bundle") pod "19b5b820-9d1f-4a41-ac3a-b6609a4be881" (UID: "19b5b820-9d1f-4a41-ac3a-b6609a4be881"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.353005 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr" (OuterVolumeSpecName: "kube-api-access-z8ckr") pod "19b5b820-9d1f-4a41-ac3a-b6609a4be881" (UID: "19b5b820-9d1f-4a41-ac3a-b6609a4be881"). InnerVolumeSpecName "kube-api-access-z8ckr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449566 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449624 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9x56\" (UniqueName: \"kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449659 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449681 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449705 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.449723 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450265 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config" (OuterVolumeSpecName: "console-config") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450297 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450314 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca" (OuterVolumeSpecName: "service-ca") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450385 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle\") pod \"6ac5aa32-7726-427e-99ee-19b74838d5b6\" (UID: \"6ac5aa32-7726-427e-99ee-19b74838d5b6\") " Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450587 4663 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450603 4663 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450611 4663 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-service-ca\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450619 4663 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450627 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8ckr\" (UniqueName: \"kubernetes.io/projected/19b5b820-9d1f-4a41-ac3a-b6609a4be881-kube-api-access-z8ckr\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.450896 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.452285 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.452726 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56" (OuterVolumeSpecName: "kube-api-access-p9x56") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "kube-api-access-p9x56". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.452979 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "6ac5aa32-7726-427e-99ee-19b74838d5b6" (UID: "6ac5aa32-7726-427e-99ee-19b74838d5b6"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.509546 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util" (OuterVolumeSpecName: "util") pod "19b5b820-9d1f-4a41-ac3a-b6609a4be881" (UID: "19b5b820-9d1f-4a41-ac3a-b6609a4be881"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.551524 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9x56\" (UniqueName: \"kubernetes.io/projected/6ac5aa32-7726-427e-99ee-19b74838d5b6-kube-api-access-p9x56\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.551551 4663 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.551560 4663 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6ac5aa32-7726-427e-99ee-19b74838d5b6-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.551568 4663 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19b5b820-9d1f-4a41-ac3a-b6609a4be881-util\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:17 crc kubenswrapper[4663]: I1212 14:14:17.551577 4663 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac5aa32-7726-427e-99ee-19b74838d5b6-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026169 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-f4vfm_6ac5aa32-7726-427e-99ee-19b74838d5b6/console/0.log" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026214 4663 generic.go:334] "Generic (PLEG): container finished" podID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerID="6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b" exitCode=2 Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026283 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f4vfm" event={"ID":"6ac5aa32-7726-427e-99ee-19b74838d5b6","Type":"ContainerDied","Data":"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b"} Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026289 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-f4vfm" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026306 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-f4vfm" event={"ID":"6ac5aa32-7726-427e-99ee-19b74838d5b6","Type":"ContainerDied","Data":"e37d9b904cb1ea9ba71b6eef5d19f8336e0fb612a361e2fccf6b4ce4b49d2169"} Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.026321 4663 scope.go:117] "RemoveContainer" containerID="6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.028347 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" event={"ID":"19b5b820-9d1f-4a41-ac3a-b6609a4be881","Type":"ContainerDied","Data":"ec2d93ca45581887d477377aec92672ed19f5752347b98edba9bb3fd84d78d8e"} Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.028370 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec2d93ca45581887d477377aec92672ed19f5752347b98edba9bb3fd84d78d8e" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.028401 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.036304 4663 scope.go:117] "RemoveContainer" containerID="6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b" Dec 12 14:14:18 crc kubenswrapper[4663]: E1212 14:14:18.036565 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b\": container with ID starting with 6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b not found: ID does not exist" containerID="6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.036595 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b"} err="failed to get container status \"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b\": rpc error: code = NotFound desc = could not find container \"6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b\": container with ID starting with 6b7cdea3614fb65b3b13d75b982db5566f7bc7162ceb2e02e53a9c70c2f1915b not found: ID does not exist" Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.046999 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"] Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.049845 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-f4vfm"] Dec 12 14:14:18 crc kubenswrapper[4663]: I1212 14:14:18.874712 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" path="/var/lib/kubelet/pods/6ac5aa32-7726-427e-99ee-19b74838d5b6/volumes" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946269 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln"] Dec 12 14:14:26 crc kubenswrapper[4663]: E1212 14:14:26.946624 4663 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="pull" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946636 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="pull" Dec 12 14:14:26 crc kubenswrapper[4663]: E1212 14:14:26.946645 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="util" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946652 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="util" Dec 12 14:14:26 crc kubenswrapper[4663]: E1212 14:14:26.946659 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerName="console" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946665 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerName="console" Dec 12 14:14:26 crc kubenswrapper[4663]: E1212 14:14:26.946672 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="extract" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946677 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="extract" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946777 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="19b5b820-9d1f-4a41-ac3a-b6609a4be881" containerName="extract" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.946790 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ac5aa32-7726-427e-99ee-19b74838d5b6" containerName="console" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.947087 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.948550 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.948799 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.948888 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.949495 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-hbs9x" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.950253 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 12 14:14:26 crc kubenswrapper[4663]: I1212 14:14:26.965803 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln"] Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.144671 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-webhook-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.144733 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9crg7\" (UniqueName: \"kubernetes.io/projected/1f208d44-68e4-40c1-803e-751fe8609d8a-kube-api-access-9crg7\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.144802 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-apiservice-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.246386 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-webhook-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.246488 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9crg7\" (UniqueName: \"kubernetes.io/projected/1f208d44-68e4-40c1-803e-751fe8609d8a-kube-api-access-9crg7\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.246516 4663 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-apiservice-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.253477 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-apiservice-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.253485 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f208d44-68e4-40c1-803e-751fe8609d8a-webhook-cert\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.259246 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9crg7\" (UniqueName: \"kubernetes.io/projected/1f208d44-68e4-40c1-803e-751fe8609d8a-kube-api-access-9crg7\") pod \"metallb-operator-controller-manager-97b7cbd45-gpcln\" (UID: \"1f208d44-68e4-40c1-803e-751fe8609d8a\") " pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.265262 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq"] Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.266249 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.269013 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-chzqv" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.269625 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.269729 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.288215 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq"] Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.449159 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fvc8\" (UniqueName: \"kubernetes.io/projected/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-kube-api-access-2fvc8\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.449233 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-apiservice-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.449263 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-webhook-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.550684 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fvc8\" (UniqueName: \"kubernetes.io/projected/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-kube-api-access-2fvc8\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.550902 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-apiservice-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.550982 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-webhook-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.553606 4663 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-webhook-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.553640 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-apiservice-cert\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.557898 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.564198 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fvc8\" (UniqueName: \"kubernetes.io/projected/6ee18c00-7a8c-4652-8bc8-ef4108f8d534-kube-api-access-2fvc8\") pod \"metallb-operator-webhook-server-5bdcb8754-8xmhq\" (UID: \"6ee18c00-7a8c-4652-8bc8-ef4108f8d534\") " pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.591884 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.752380 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq"] Dec 12 14:14:27 crc kubenswrapper[4663]: I1212 14:14:27.911205 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln"] Dec 12 14:14:27 crc kubenswrapper[4663]: W1212 14:14:27.915262 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f208d44_68e4_40c1_803e_751fe8609d8a.slice/crio-42a62cf897b13775402d2971188e79a3dc1ac85da80f2b2a0096509fdafe778e WatchSource:0}: Error finding container 42a62cf897b13775402d2971188e79a3dc1ac85da80f2b2a0096509fdafe778e: Status 404 returned error can't find the container with id 42a62cf897b13775402d2971188e79a3dc1ac85da80f2b2a0096509fdafe778e Dec 12 14:14:28 crc kubenswrapper[4663]: I1212 14:14:28.067731 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" event={"ID":"6ee18c00-7a8c-4652-8bc8-ef4108f8d534","Type":"ContainerStarted","Data":"5e17e6d23ddc953599a77744950dcf839d94b3ef970b87de66ddd8901816f2b2"} Dec 12 14:14:28 crc kubenswrapper[4663]: I1212 14:14:28.072831 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" event={"ID":"1f208d44-68e4-40c1-803e-751fe8609d8a","Type":"ContainerStarted","Data":"42a62cf897b13775402d2971188e79a3dc1ac85da80f2b2a0096509fdafe778e"} Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.094408 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" 
event={"ID":"6ee18c00-7a8c-4652-8bc8-ef4108f8d534","Type":"ContainerStarted","Data":"ae146ba4096d2a3ba845a5f07213ee5eb2c3a38acb7b4a6054e708874c9b450a"} Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.095388 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.095960 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" event={"ID":"1f208d44-68e4-40c1-803e-751fe8609d8a","Type":"ContainerStarted","Data":"164d87ad5a1ee9b8fbab5d15428c335757352551f081f43674608c384c9f1b6f"} Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.096103 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.108086 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" podStartSLOduration=1.25760001 podStartE2EDuration="5.108073221s" podCreationTimestamp="2025-12-12 14:14:27 +0000 UTC" firstStartedPulling="2025-12-12 14:14:27.761261428 +0000 UTC m=+647.186679483" lastFinishedPulling="2025-12-12 14:14:31.61173464 +0000 UTC m=+651.037152694" observedRunningTime="2025-12-12 14:14:32.105512164 +0000 UTC m=+651.530930217" watchObservedRunningTime="2025-12-12 14:14:32.108073221 +0000 UTC m=+651.533491274" Dec 12 14:14:32 crc kubenswrapper[4663]: I1212 14:14:32.121046 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" podStartSLOduration=2.439322325 podStartE2EDuration="6.121034353s" podCreationTimestamp="2025-12-12 14:14:26 +0000 UTC" firstStartedPulling="2025-12-12 14:14:27.9181991 +0000 UTC m=+647.343617154" lastFinishedPulling="2025-12-12 14:14:31.599911127 +0000 UTC m=+651.025329182" observedRunningTime="2025-12-12 14:14:32.11832157 +0000 UTC m=+651.543739624" watchObservedRunningTime="2025-12-12 14:14:32.121034353 +0000 UTC m=+651.546452408" Dec 12 14:14:47 crc kubenswrapper[4663]: I1212 14:14:47.597232 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5bdcb8754-8xmhq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.133308 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq"] Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.134205 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.135771 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.136138 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.142789 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq"] Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.206541 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.206583 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpwgx\" (UniqueName: \"kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.206610 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.308161 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.308250 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.308267 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpwgx\" (UniqueName: \"kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.309199 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume\") pod 
\"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.317351 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.338275 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpwgx\" (UniqueName: \"kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx\") pod \"collect-profiles-29425815-wzxcq\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.446446 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:00 crc kubenswrapper[4663]: I1212 14:15:00.782341 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq"] Dec 12 14:15:01 crc kubenswrapper[4663]: I1212 14:15:01.213346 4663 generic.go:334] "Generic (PLEG): container finished" podID="b2a31418-f2e4-48a4-b026-f4987fb0cc0e" containerID="cc9c87a2c60590682ab53c8ee51f3afaf99d9e962c041e5a04036d30611fbdf4" exitCode=0 Dec 12 14:15:01 crc kubenswrapper[4663]: I1212 14:15:01.213437 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" event={"ID":"b2a31418-f2e4-48a4-b026-f4987fb0cc0e","Type":"ContainerDied","Data":"cc9c87a2c60590682ab53c8ee51f3afaf99d9e962c041e5a04036d30611fbdf4"} Dec 12 14:15:01 crc kubenswrapper[4663]: I1212 14:15:01.213564 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" event={"ID":"b2a31418-f2e4-48a4-b026-f4987fb0cc0e","Type":"ContainerStarted","Data":"0ebba9e974e25661e39f3715ea6fb7957e0268fcc2f4951a1402fddd63aa1a7a"} Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.396718 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.431684 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume\") pod \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.431824 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpwgx\" (UniqueName: \"kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx\") pod \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.431850 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume\") pod \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\" (UID: \"b2a31418-f2e4-48a4-b026-f4987fb0cc0e\") " Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.432380 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume" (OuterVolumeSpecName: "config-volume") pod "b2a31418-f2e4-48a4-b026-f4987fb0cc0e" (UID: "b2a31418-f2e4-48a4-b026-f4987fb0cc0e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.436020 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx" (OuterVolumeSpecName: "kube-api-access-cpwgx") pod "b2a31418-f2e4-48a4-b026-f4987fb0cc0e" (UID: "b2a31418-f2e4-48a4-b026-f4987fb0cc0e"). InnerVolumeSpecName "kube-api-access-cpwgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.436069 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b2a31418-f2e4-48a4-b026-f4987fb0cc0e" (UID: "b2a31418-f2e4-48a4-b026-f4987fb0cc0e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.533623 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpwgx\" (UniqueName: \"kubernetes.io/projected/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-kube-api-access-cpwgx\") on node \"crc\" DevicePath \"\"" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.533649 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:15:02 crc kubenswrapper[4663]: I1212 14:15:02.533659 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b2a31418-f2e4-48a4-b026-f4987fb0cc0e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:15:03 crc kubenswrapper[4663]: I1212 14:15:03.221999 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" event={"ID":"b2a31418-f2e4-48a4-b026-f4987fb0cc0e","Type":"ContainerDied","Data":"0ebba9e974e25661e39f3715ea6fb7957e0268fcc2f4951a1402fddd63aa1a7a"} Dec 12 14:15:03 crc kubenswrapper[4663]: I1212 14:15:03.222032 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ebba9e974e25661e39f3715ea6fb7957e0268fcc2f4951a1402fddd63aa1a7a" Dec 12 14:15:03 crc kubenswrapper[4663]: I1212 14:15:03.222078 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425815-wzxcq" Dec 12 14:15:06 crc kubenswrapper[4663]: I1212 14:15:06.128576 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:15:06 crc kubenswrapper[4663]: I1212 14:15:06.128789 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:15:07 crc kubenswrapper[4663]: I1212 14:15:07.560497 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-97b7cbd45-gpcln" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.106589 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-m5rz7"] Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.106947 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2a31418-f2e4-48a4-b026-f4987fb0cc0e" containerName="collect-profiles" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.107017 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2a31418-f2e4-48a4-b026-f4987fb0cc0e" containerName="collect-profiles" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.107158 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2a31418-f2e4-48a4-b026-f4987fb0cc0e" containerName="collect-profiles" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.108810 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.110146 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.110290 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.110359 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mjdm4" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.122577 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw"] Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.123140 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.124777 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.143682 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw"] Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186513 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-ktlsq"] Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186612 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186648 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5794\" (UniqueName: \"kubernetes.io/projected/ed966777-b283-45df-b619-93ff7eac79d2-kube-api-access-t5794\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186666 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6kzv\" (UniqueName: \"kubernetes.io/projected/1d2f5585-abc1-4104-8d45-9e3ff2976653-kube-api-access-g6kzv\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186697 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-startup\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186838 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ed966777-b283-45df-b619-93ff7eac79d2-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186879 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-sockets\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186900 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics-certs\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186955 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-reloader\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.186975 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-conf\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.187217 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.191024 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-w4rbp" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.191340 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.191377 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.193015 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.201073 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-d267t"] Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.201785 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.204497 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.220527 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-d267t"] Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287673 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-sockets\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287718 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzfsm\" (UniqueName: \"kubernetes.io/projected/ba53dfba-45e3-4ac2-93ca-67772a7acdef-kube-api-access-gzfsm\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287736 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics-certs\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287774 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4vxw\" (UniqueName: \"kubernetes.io/projected/95cf0800-d909-4e76-87c9-c41e4df21a56-kube-api-access-n4vxw\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287792 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-metrics-certs\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287832 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-cert\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287849 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-reloader\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287864 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-conf\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287880 4663 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287912 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287930 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5794\" (UniqueName: \"kubernetes.io/projected/ed966777-b283-45df-b619-93ff7eac79d2-kube-api-access-t5794\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287946 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6kzv\" (UniqueName: \"kubernetes.io/projected/1d2f5585-abc1-4104-8d45-9e3ff2976653-kube-api-access-g6kzv\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287966 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.287989 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-startup\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288007 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/95cf0800-d909-4e76-87c9-c41e4df21a56-metallb-excludel2\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288024 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ed966777-b283-45df-b619-93ff7eac79d2-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288430 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-sockets\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288477 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-reloader\") pod 
\"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288770 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.288988 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-conf\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.289077 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/1d2f5585-abc1-4104-8d45-9e3ff2976653-frr-startup\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.295097 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1d2f5585-abc1-4104-8d45-9e3ff2976653-metrics-certs\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.298260 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ed966777-b283-45df-b619-93ff7eac79d2-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.300238 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5794\" (UniqueName: \"kubernetes.io/projected/ed966777-b283-45df-b619-93ff7eac79d2-kube-api-access-t5794\") pod \"frr-k8s-webhook-server-7784b6fcf-twfrw\" (UID: \"ed966777-b283-45df-b619-93ff7eac79d2\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.301804 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6kzv\" (UniqueName: \"kubernetes.io/projected/1d2f5585-abc1-4104-8d45-9e3ff2976653-kube-api-access-g6kzv\") pod \"frr-k8s-m5rz7\" (UID: \"1d2f5585-abc1-4104-8d45-9e3ff2976653\") " pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388681 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388739 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/95cf0800-d909-4e76-87c9-c41e4df21a56-metallb-excludel2\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388779 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gzfsm\" (UniqueName: \"kubernetes.io/projected/ba53dfba-45e3-4ac2-93ca-67772a7acdef-kube-api-access-gzfsm\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388800 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4vxw\" (UniqueName: \"kubernetes.io/projected/95cf0800-d909-4e76-87c9-c41e4df21a56-kube-api-access-n4vxw\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388818 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-metrics-certs\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388834 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-cert\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.388838 4663 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.388854 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.388891 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs podName:95cf0800-d909-4e76-87c9-c41e4df21a56 nodeName:}" failed. No retries permitted until 2025-12-12 14:15:08.888877352 +0000 UTC m=+688.314295406 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs") pod "speaker-ktlsq" (UID: "95cf0800-d909-4e76-87c9-c41e4df21a56") : secret "speaker-certs-secret" not found Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.388961 4663 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.389003 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist podName:95cf0800-d909-4e76-87c9-c41e4df21a56 nodeName:}" failed. No retries permitted until 2025-12-12 14:15:08.88898779 +0000 UTC m=+688.314405844 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist") pod "speaker-ktlsq" (UID: "95cf0800-d909-4e76-87c9-c41e4df21a56") : secret "metallb-memberlist" not found Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.389425 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/95cf0800-d909-4e76-87c9-c41e4df21a56-metallb-excludel2\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.390212 4663 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.391800 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-metrics-certs\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.401595 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ba53dfba-45e3-4ac2-93ca-67772a7acdef-cert\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.402325 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzfsm\" (UniqueName: \"kubernetes.io/projected/ba53dfba-45e3-4ac2-93ca-67772a7acdef-kube-api-access-gzfsm\") pod \"controller-5bddd4b946-d267t\" (UID: \"ba53dfba-45e3-4ac2-93ca-67772a7acdef\") " pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.404349 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4vxw\" (UniqueName: \"kubernetes.io/projected/95cf0800-d909-4e76-87c9-c41e4df21a56-kube-api-access-n4vxw\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.420932 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.436956 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.511798 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.773399 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw"] Dec 12 14:15:08 crc kubenswrapper[4663]: W1212 14:15:08.776025 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded966777_b283_45df_b619_93ff7eac79d2.slice/crio-e22b00bf0fd704a0d2c72aaa231dcf8e9749aa2756b9fd16d2d0270e82eaeb8d WatchSource:0}: Error finding container e22b00bf0fd704a0d2c72aaa231dcf8e9749aa2756b9fd16d2d0270e82eaeb8d: Status 404 returned error can't find the container with id e22b00bf0fd704a0d2c72aaa231dcf8e9749aa2756b9fd16d2d0270e82eaeb8d Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.863297 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-d267t"] Dec 12 14:15:08 crc kubenswrapper[4663]: W1212 14:15:08.866108 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba53dfba_45e3_4ac2_93ca_67772a7acdef.slice/crio-ace65f4046c7c0ae207e6a993f3daa2acf4e4e6f366291c29b39735ba250310e WatchSource:0}: Error finding container ace65f4046c7c0ae207e6a993f3daa2acf4e4e6f366291c29b39735ba250310e: Status 404 returned error can't find the container with id ace65f4046c7c0ae207e6a993f3daa2acf4e4e6f366291c29b39735ba250310e Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.892836 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.892890 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.892977 4663 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 12 14:15:08 crc kubenswrapper[4663]: E1212 14:15:08.893721 4663 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist podName:95cf0800-d909-4e76-87c9-c41e4df21a56 nodeName:}" failed. No retries permitted until 2025-12-12 14:15:09.893702658 +0000 UTC m=+689.319120711 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist") pod "speaker-ktlsq" (UID: "95cf0800-d909-4e76-87c9-c41e4df21a56") : secret "metallb-memberlist" not found Dec 12 14:15:08 crc kubenswrapper[4663]: I1212 14:15:08.897607 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-metrics-certs\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.245646 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" event={"ID":"ed966777-b283-45df-b619-93ff7eac79d2","Type":"ContainerStarted","Data":"e22b00bf0fd704a0d2c72aaa231dcf8e9749aa2756b9fd16d2d0270e82eaeb8d"} Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.247073 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-d267t" event={"ID":"ba53dfba-45e3-4ac2-93ca-67772a7acdef","Type":"ContainerStarted","Data":"a9cf1297f7d86d3e5cc85c965f4f75c473238dab35592761ce83206cca5be9e3"} Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.247111 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-d267t" event={"ID":"ba53dfba-45e3-4ac2-93ca-67772a7acdef","Type":"ContainerStarted","Data":"c5a4c75b7eb0f582b8d00751b4e211d92c8608d2dff8807d6bfbc33baf103db1"} Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.247121 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-d267t" event={"ID":"ba53dfba-45e3-4ac2-93ca-67772a7acdef","Type":"ContainerStarted","Data":"ace65f4046c7c0ae207e6a993f3daa2acf4e4e6f366291c29b39735ba250310e"} Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.247238 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.247972 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"c05f5a4cb19fd10d5032b59e807279388708135844179b17bb1163d5abc1dbe3"} Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.259686 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-d267t" podStartSLOduration=1.259674231 podStartE2EDuration="1.259674231s" podCreationTimestamp="2025-12-12 14:15:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:15:09.257422066 +0000 UTC m=+688.682840121" watchObservedRunningTime="2025-12-12 14:15:09.259674231 +0000 UTC m=+688.685092286" Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.902594 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist\") pod \"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.906170 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/95cf0800-d909-4e76-87c9-c41e4df21a56-memberlist\") pod 
\"speaker-ktlsq\" (UID: \"95cf0800-d909-4e76-87c9-c41e4df21a56\") " pod="metallb-system/speaker-ktlsq" Dec 12 14:15:09 crc kubenswrapper[4663]: I1212 14:15:09.997080 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-ktlsq" Dec 12 14:15:10 crc kubenswrapper[4663]: W1212 14:15:10.025979 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95cf0800_d909_4e76_87c9_c41e4df21a56.slice/crio-8ae34efaff568767427859459d9c4a78c79cdab52f283d0e3bb83c6db812464e WatchSource:0}: Error finding container 8ae34efaff568767427859459d9c4a78c79cdab52f283d0e3bb83c6db812464e: Status 404 returned error can't find the container with id 8ae34efaff568767427859459d9c4a78c79cdab52f283d0e3bb83c6db812464e Dec 12 14:15:10 crc kubenswrapper[4663]: I1212 14:15:10.254933 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-ktlsq" event={"ID":"95cf0800-d909-4e76-87c9-c41e4df21a56","Type":"ContainerStarted","Data":"b9f6bf1be5d397be8c65e266c90a510ec685d173096576ccf8098f56dd3e53e5"} Dec 12 14:15:10 crc kubenswrapper[4663]: I1212 14:15:10.254969 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-ktlsq" event={"ID":"95cf0800-d909-4e76-87c9-c41e4df21a56","Type":"ContainerStarted","Data":"8ae34efaff568767427859459d9c4a78c79cdab52f283d0e3bb83c6db812464e"} Dec 12 14:15:11 crc kubenswrapper[4663]: I1212 14:15:11.263392 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-ktlsq" event={"ID":"95cf0800-d909-4e76-87c9-c41e4df21a56","Type":"ContainerStarted","Data":"16ce6346e5be21ac097ae8157f91b76170bcf064cb8c05ccf740c5c4bcc97c26"} Dec 12 14:15:11 crc kubenswrapper[4663]: I1212 14:15:11.264282 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-ktlsq" Dec 12 14:15:11 crc kubenswrapper[4663]: I1212 14:15:11.285522 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-ktlsq" podStartSLOduration=3.285505423 podStartE2EDuration="3.285505423s" podCreationTimestamp="2025-12-12 14:15:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:15:11.284916823 +0000 UTC m=+690.710334877" watchObservedRunningTime="2025-12-12 14:15:11.285505423 +0000 UTC m=+690.710923477" Dec 12 14:15:15 crc kubenswrapper[4663]: I1212 14:15:15.284010 4663 generic.go:334] "Generic (PLEG): container finished" podID="1d2f5585-abc1-4104-8d45-9e3ff2976653" containerID="9bd0135379422d8c44a02e310e2581b947efe111f0603d2084ff4cbf9a439cc3" exitCode=0 Dec 12 14:15:15 crc kubenswrapper[4663]: I1212 14:15:15.284100 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerDied","Data":"9bd0135379422d8c44a02e310e2581b947efe111f0603d2084ff4cbf9a439cc3"} Dec 12 14:15:15 crc kubenswrapper[4663]: I1212 14:15:15.286538 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" event={"ID":"ed966777-b283-45df-b619-93ff7eac79d2","Type":"ContainerStarted","Data":"26789af48605c880bfa6044d60dbe0d03b3894840001332a73a1732ea49c6c6f"} Dec 12 14:15:15 crc kubenswrapper[4663]: I1212 14:15:15.286666 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:15 
crc kubenswrapper[4663]: I1212 14:15:15.344504 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" podStartSLOduration=1.895874109 podStartE2EDuration="7.344475459s" podCreationTimestamp="2025-12-12 14:15:08 +0000 UTC" firstStartedPulling="2025-12-12 14:15:08.777593147 +0000 UTC m=+688.203011201" lastFinishedPulling="2025-12-12 14:15:14.226194497 +0000 UTC m=+693.651612551" observedRunningTime="2025-12-12 14:15:15.340844746 +0000 UTC m=+694.766262800" watchObservedRunningTime="2025-12-12 14:15:15.344475459 +0000 UTC m=+694.769893513" Dec 12 14:15:16 crc kubenswrapper[4663]: I1212 14:15:16.292928 4663 generic.go:334] "Generic (PLEG): container finished" podID="1d2f5585-abc1-4104-8d45-9e3ff2976653" containerID="c427e2d77344d6688e77faea1fa0b9723181237d3fbba735c550e5797bc461ee" exitCode=0 Dec 12 14:15:16 crc kubenswrapper[4663]: I1212 14:15:16.292991 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerDied","Data":"c427e2d77344d6688e77faea1fa0b9723181237d3fbba735c550e5797bc461ee"} Dec 12 14:15:17 crc kubenswrapper[4663]: I1212 14:15:17.299089 4663 generic.go:334] "Generic (PLEG): container finished" podID="1d2f5585-abc1-4104-8d45-9e3ff2976653" containerID="92369c2cdf738064f626a8c399d1007b72f97f1159dcae2cde72d85c4e1064c5" exitCode=0 Dec 12 14:15:17 crc kubenswrapper[4663]: I1212 14:15:17.299259 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerDied","Data":"92369c2cdf738064f626a8c399d1007b72f97f1159dcae2cde72d85c4e1064c5"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.307054 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"39bc99c55024fb284506ef1291c78143506c03b90685312a1d1b2910c6c411da"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.307879 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"a5313a88fa91a30d85da57bbb53596569ffa46a08b13d51335c15f3fb27165be"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.307950 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.308012 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"89d9a521c0aeb50a747726e5b5441a5ea8a923b5d5e884751051afcfde1fbf31"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.308065 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"775d93915ae35cb05329c64357f53a79d2b59cee7894501771571e43d8e17956"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.308113 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"71d419d7c83cbbf0829a1fe5e506b43d08dda3514a17feec0a36565abc41d773"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.308167 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-m5rz7" event={"ID":"1d2f5585-abc1-4104-8d45-9e3ff2976653","Type":"ContainerStarted","Data":"e98e7b0d5467bc86454d0c22c28880a3d3f328ff436497c2047500cb752dc3d0"} Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.324791 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-m5rz7" podStartSLOduration=4.592860739 podStartE2EDuration="10.324778523s" podCreationTimestamp="2025-12-12 14:15:08 +0000 UTC" firstStartedPulling="2025-12-12 14:15:08.48925121 +0000 UTC m=+687.914669264" lastFinishedPulling="2025-12-12 14:15:14.221168994 +0000 UTC m=+693.646587048" observedRunningTime="2025-12-12 14:15:18.322596289 +0000 UTC m=+697.748014343" watchObservedRunningTime="2025-12-12 14:15:18.324778523 +0000 UTC m=+697.750196577" Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.421797 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.449789 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:18 crc kubenswrapper[4663]: I1212 14:15:18.514388 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-d267t" Dec 12 14:15:28 crc kubenswrapper[4663]: I1212 14:15:28.423045 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-m5rz7" Dec 12 14:15:28 crc kubenswrapper[4663]: I1212 14:15:28.441273 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-twfrw" Dec 12 14:15:29 crc kubenswrapper[4663]: I1212 14:15:29.999436 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-ktlsq" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.774060 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.774959 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.776437 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.776445 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-llcdv" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.776915 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.780000 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6zqm\" (UniqueName: \"kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm\") pod \"openstack-operator-index-kjxdf\" (UID: \"8bfc79c2-6155-477f-987f-c45e5b73309e\") " pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.780532 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.881525 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6zqm\" (UniqueName: \"kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm\") pod \"openstack-operator-index-kjxdf\" (UID: \"8bfc79c2-6155-477f-987f-c45e5b73309e\") " pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:15:35 crc kubenswrapper[4663]: I1212 14:15:35.896328 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6zqm\" (UniqueName: \"kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm\") pod \"openstack-operator-index-kjxdf\" (UID: \"8bfc79c2-6155-477f-987f-c45e5b73309e\") " pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:15:36 crc kubenswrapper[4663]: I1212 14:15:36.087760 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:15:36 crc kubenswrapper[4663]: I1212 14:15:36.129065 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:15:36 crc kubenswrapper[4663]: I1212 14:15:36.129133 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:15:36 crc kubenswrapper[4663]: I1212 14:15:36.419933 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:15:36 crc kubenswrapper[4663]: W1212 14:15:36.423645 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8bfc79c2_6155_477f_987f_c45e5b73309e.slice/crio-46e2a7439e25fdafa4c19b1fe3655932c3507e3ace6a2867c739b1bea008b638 WatchSource:0}: Error finding container 46e2a7439e25fdafa4c19b1fe3655932c3507e3ace6a2867c739b1bea008b638: Status 404 returned error can't find the container with id 46e2a7439e25fdafa4c19b1fe3655932c3507e3ace6a2867c739b1bea008b638 Dec 12 14:15:37 crc kubenswrapper[4663]: I1212 14:15:37.394222 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kjxdf" event={"ID":"8bfc79c2-6155-477f-987f-c45e5b73309e","Type":"ContainerStarted","Data":"46e2a7439e25fdafa4c19b1fe3655932c3507e3ace6a2867c739b1bea008b638"} Dec 12 14:15:40 crc kubenswrapper[4663]: I1212 14:15:40.969457 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.571203 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-7p228"] Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.571804 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7p228" Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.577596 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7p228"] Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.639947 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vc6l\" (UniqueName: \"kubernetes.io/projected/d196ad18-5271-4392-a785-c4231f5d1cea-kube-api-access-5vc6l\") pod \"openstack-operator-index-7p228\" (UID: \"d196ad18-5271-4392-a785-c4231f5d1cea\") " pod="openstack-operators/openstack-operator-index-7p228" Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.741646 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vc6l\" (UniqueName: \"kubernetes.io/projected/d196ad18-5271-4392-a785-c4231f5d1cea-kube-api-access-5vc6l\") pod \"openstack-operator-index-7p228\" (UID: \"d196ad18-5271-4392-a785-c4231f5d1cea\") " pod="openstack-operators/openstack-operator-index-7p228" Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.755769 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vc6l\" (UniqueName: \"kubernetes.io/projected/d196ad18-5271-4392-a785-c4231f5d1cea-kube-api-access-5vc6l\") pod \"openstack-operator-index-7p228\" (UID: \"d196ad18-5271-4392-a785-c4231f5d1cea\") " pod="openstack-operators/openstack-operator-index-7p228" Dec 12 14:15:41 crc kubenswrapper[4663]: I1212 14:15:41.882572 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7p228" Dec 12 14:15:42 crc kubenswrapper[4663]: I1212 14:15:42.213842 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7p228"] Dec 12 14:15:42 crc kubenswrapper[4663]: W1212 14:15:42.218830 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd196ad18_5271_4392_a785_c4231f5d1cea.slice/crio-0b7c01e1cab4daf2a526f2dae49eed60a980329065d05a60ab8f6e9b29670e60 WatchSource:0}: Error finding container 0b7c01e1cab4daf2a526f2dae49eed60a980329065d05a60ab8f6e9b29670e60: Status 404 returned error can't find the container with id 0b7c01e1cab4daf2a526f2dae49eed60a980329065d05a60ab8f6e9b29670e60 Dec 12 14:15:42 crc kubenswrapper[4663]: I1212 14:15:42.415031 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7p228" event={"ID":"d196ad18-5271-4392-a785-c4231f5d1cea","Type":"ContainerStarted","Data":"0b7c01e1cab4daf2a526f2dae49eed60a980329065d05a60ab8f6e9b29670e60"} Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.128937 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.129303 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 
14:16:06.129357 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.129862 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.129929 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b" gracePeriod=600 Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.508411 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b" exitCode=0 Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.508484 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b"} Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.508758 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2"} Dec 12 14:16:06 crc kubenswrapper[4663]: I1212 14:16:06.508836 4663 scope.go:117] "RemoveContainer" containerID="fdd804f3d5e00648af4f1a04594dd26f7076d16b07130cdcecfe1b8605b9a2f9" Dec 12 14:16:17 crc kubenswrapper[4663]: I1212 14:16:17.420608 4663 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 12 14:17:36 crc kubenswrapper[4663]: E1212 14:17:36.429675 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:17:36 crc kubenswrapper[4663]: E1212 14:17:36.430097 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:17:36 crc kubenswrapper[4663]: E1212 14:17:36.430219 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j6zqm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-kjxdf_openstack-operators(8bfc79c2-6155-477f-987f-c45e5b73309e): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:17:36 crc kubenswrapper[4663]: E1212 14:17:36.431403 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-kjxdf" podUID="8bfc79c2-6155-477f-987f-c45e5b73309e" Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.034202 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.170614 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6zqm\" (UniqueName: \"kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm\") pod \"8bfc79c2-6155-477f-987f-c45e5b73309e\" (UID: \"8bfc79c2-6155-477f-987f-c45e5b73309e\") " Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.174584 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm" (OuterVolumeSpecName: "kube-api-access-j6zqm") pod "8bfc79c2-6155-477f-987f-c45e5b73309e" (UID: "8bfc79c2-6155-477f-987f-c45e5b73309e"). InnerVolumeSpecName "kube-api-access-j6zqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.271405 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6zqm\" (UniqueName: \"kubernetes.io/projected/8bfc79c2-6155-477f-987f-c45e5b73309e-kube-api-access-j6zqm\") on node \"crc\" DevicePath \"\"" Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.862791 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kjxdf" event={"ID":"8bfc79c2-6155-477f-987f-c45e5b73309e","Type":"ContainerDied","Data":"46e2a7439e25fdafa4c19b1fe3655932c3507e3ace6a2867c739b1bea008b638"} Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.862819 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-kjxdf" Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.890812 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:17:37 crc kubenswrapper[4663]: I1212 14:17:37.893551 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-kjxdf"] Dec 12 14:17:38 crc kubenswrapper[4663]: I1212 14:17:38.874794 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bfc79c2-6155-477f-987f-c45e5b73309e" path="/var/lib/kubelet/pods/8bfc79c2-6155-477f-987f-c45e5b73309e/volumes" Dec 12 14:17:42 crc kubenswrapper[4663]: E1212 14:17:42.225169 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:17:42 crc kubenswrapper[4663]: E1212 14:17:42.225365 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:17:42 crc kubenswrapper[4663]: E1212 14:17:42.225469 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:17:42 crc kubenswrapper[4663]: E1212 14:17:42.226630 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:17:42 crc kubenswrapper[4663]: E1212 14:17:42.889590 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" 
podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.451586 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.453589 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.457710 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.544130 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.544180 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2dnx\" (UniqueName: \"kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.544221 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.645045 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.645089 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2dnx\" (UniqueName: \"kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.645154 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.645548 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.645673 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.660843 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2dnx\" (UniqueName: \"kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx\") pod \"community-operators-l8vxk\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:44 crc kubenswrapper[4663]: I1212 14:17:44.768064 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:45 crc kubenswrapper[4663]: I1212 14:17:45.121486 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:45 crc kubenswrapper[4663]: I1212 14:17:45.902432 4663 generic.go:334] "Generic (PLEG): container finished" podID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerID="14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769" exitCode=0 Dec 12 14:17:45 crc kubenswrapper[4663]: I1212 14:17:45.902551 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerDied","Data":"14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769"} Dec 12 14:17:45 crc kubenswrapper[4663]: I1212 14:17:45.902712 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerStarted","Data":"dc3797ff6796fd62c1264c11ac0e2bb8f28fb4f2910a7f51ea62f439751380da"} Dec 12 14:17:46 crc kubenswrapper[4663]: I1212 14:17:46.907528 4663 generic.go:334] "Generic (PLEG): container finished" podID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerID="e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05" exitCode=0 Dec 12 14:17:46 crc kubenswrapper[4663]: I1212 14:17:46.907587 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerDied","Data":"e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05"} Dec 12 14:17:47 crc kubenswrapper[4663]: I1212 14:17:47.916023 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerStarted","Data":"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea"} Dec 12 14:17:47 crc kubenswrapper[4663]: I1212 14:17:47.929707 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l8vxk" podStartSLOduration=2.360477839 podStartE2EDuration="3.929693513s" podCreationTimestamp="2025-12-12 14:17:44 +0000 UTC" firstStartedPulling="2025-12-12 14:17:45.903573985 +0000 UTC m=+845.328992039" lastFinishedPulling="2025-12-12 14:17:47.472789659 +0000 UTC m=+846.898207713" observedRunningTime="2025-12-12 14:17:47.927331757 +0000 UTC m=+847.352749810" watchObservedRunningTime="2025-12-12 14:17:47.929693513 +0000 UTC m=+847.355111567" Dec 12 14:17:54 crc kubenswrapper[4663]: I1212 14:17:54.768437 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:54 crc kubenswrapper[4663]: I1212 14:17:54.768712 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:54 crc kubenswrapper[4663]: I1212 14:17:54.794847 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:54 crc kubenswrapper[4663]: I1212 14:17:54.975169 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:55 crc kubenswrapper[4663]: I1212 14:17:55.013917 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:56 crc kubenswrapper[4663]: I1212 14:17:56.871022 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 12 14:17:56 crc kubenswrapper[4663]: I1212 14:17:56.958830 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l8vxk" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="registry-server" containerID="cri-o://6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea" gracePeriod=2 Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.730597 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.872183 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content\") pod \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.872230 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities\") pod \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.872270 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2dnx\" (UniqueName: \"kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx\") pod \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\" (UID: \"95873577-a4a4-41dc-8d78-97f9ee24a7d5\") " Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.872986 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities" (OuterVolumeSpecName: "utilities") pod "95873577-a4a4-41dc-8d78-97f9ee24a7d5" (UID: "95873577-a4a4-41dc-8d78-97f9ee24a7d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.876126 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx" (OuterVolumeSpecName: "kube-api-access-v2dnx") pod "95873577-a4a4-41dc-8d78-97f9ee24a7d5" (UID: "95873577-a4a4-41dc-8d78-97f9ee24a7d5"). InnerVolumeSpecName "kube-api-access-v2dnx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.908254 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95873577-a4a4-41dc-8d78-97f9ee24a7d5" (UID: "95873577-a4a4-41dc-8d78-97f9ee24a7d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.965035 4663 generic.go:334] "Generic (PLEG): container finished" podID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerID="6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea" exitCode=0 Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.965066 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerDied","Data":"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea"} Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.965094 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l8vxk" event={"ID":"95873577-a4a4-41dc-8d78-97f9ee24a7d5","Type":"ContainerDied","Data":"dc3797ff6796fd62c1264c11ac0e2bb8f28fb4f2910a7f51ea62f439751380da"} Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.965102 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l8vxk" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.965109 4663 scope.go:117] "RemoveContainer" containerID="6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.973903 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.974032 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95873577-a4a4-41dc-8d78-97f9ee24a7d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.974056 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2dnx\" (UniqueName: \"kubernetes.io/projected/95873577-a4a4-41dc-8d78-97f9ee24a7d5-kube-api-access-v2dnx\") on node \"crc\" DevicePath \"\"" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.976258 4663 scope.go:117] "RemoveContainer" containerID="e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05" Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.987509 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.990627 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l8vxk"] Dec 12 14:17:57 crc kubenswrapper[4663]: I1212 14:17:57.991834 4663 scope.go:117] "RemoveContainer" containerID="14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.027830 4663 scope.go:117] "RemoveContainer" containerID="6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea" Dec 12 14:17:58 crc kubenswrapper[4663]: E1212 14:17:58.028159 4663 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea\": container with ID starting with 6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea not found: ID does not exist" containerID="6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.028195 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea"} err="failed to get container status \"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea\": rpc error: code = NotFound desc = could not find container \"6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea\": container with ID starting with 6fa7461ecda943f699986ae93489e245ab9102e34c8048dcf4367661e7e969ea not found: ID does not exist" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.028217 4663 scope.go:117] "RemoveContainer" containerID="e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05" Dec 12 14:17:58 crc kubenswrapper[4663]: E1212 14:17:58.028589 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05\": container with ID starting with e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05 not found: ID does not exist" containerID="e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.028616 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05"} err="failed to get container status \"e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05\": rpc error: code = NotFound desc = could not find container \"e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05\": container with ID starting with e72d02e7c4e14e23454109a37d8da994819934d9b0b66ccfbee2e6c32e73fd05 not found: ID does not exist" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.028635 4663 scope.go:117] "RemoveContainer" containerID="14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769" Dec 12 14:17:58 crc kubenswrapper[4663]: E1212 14:17:58.028936 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769\": container with ID starting with 14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769 not found: ID does not exist" containerID="14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.028960 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769"} err="failed to get container status \"14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769\": rpc error: code = NotFound desc = could not find container \"14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769\": container with ID starting with 14811355f92be3b7a4c86ffb5302070fe442f2d3bb285a179bed52aebcc80769 not found: ID does not exist" Dec 12 14:17:58 crc kubenswrapper[4663]: I1212 14:17:58.874867 4663 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" path="/var/lib/kubelet/pods/95873577-a4a4-41dc-8d78-97f9ee24a7d5/volumes" Dec 12 14:18:06 crc kubenswrapper[4663]: I1212 14:18:06.129228 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:18:06 crc kubenswrapper[4663]: I1212 14:18:06.130292 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.357029 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:12 crc kubenswrapper[4663]: E1212 14:18:12.357651 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="extract-utilities" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.357662 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="extract-utilities" Dec 12 14:18:12 crc kubenswrapper[4663]: E1212 14:18:12.357677 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="extract-content" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.357682 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="extract-content" Dec 12 14:18:12 crc kubenswrapper[4663]: E1212 14:18:12.357695 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="registry-server" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.357702 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="registry-server" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.357823 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="95873577-a4a4-41dc-8d78-97f9ee24a7d5" containerName="registry-server" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.358487 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.367432 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.420979 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.421100 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.421144 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zghc9\" (UniqueName: \"kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.521797 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.521875 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.521904 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zghc9\" (UniqueName: \"kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.522249 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.522300 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.538159 4663 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zghc9\" (UniqueName: \"kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9\") pod \"redhat-marketplace-bss47\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.678610 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:12 crc kubenswrapper[4663]: I1212 14:18:12.863143 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:13 crc kubenswrapper[4663]: I1212 14:18:13.029851 4663 generic.go:334] "Generic (PLEG): container finished" podID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerID="e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8" exitCode=0 Dec 12 14:18:13 crc kubenswrapper[4663]: I1212 14:18:13.029897 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerDied","Data":"e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8"} Dec 12 14:18:13 crc kubenswrapper[4663]: I1212 14:18:13.029927 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerStarted","Data":"612fc2878eb6f9371ff92d6270d173d4c576a11336b91638c38bebad9a088684"} Dec 12 14:18:14 crc kubenswrapper[4663]: I1212 14:18:14.035760 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerStarted","Data":"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37"} Dec 12 14:18:15 crc kubenswrapper[4663]: I1212 14:18:15.042783 4663 generic.go:334] "Generic (PLEG): container finished" podID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerID="8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37" exitCode=0 Dec 12 14:18:15 crc kubenswrapper[4663]: I1212 14:18:15.042862 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerDied","Data":"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37"} Dec 12 14:18:16 crc kubenswrapper[4663]: I1212 14:18:16.050253 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerStarted","Data":"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37"} Dec 12 14:18:16 crc kubenswrapper[4663]: I1212 14:18:16.065996 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bss47" podStartSLOduration=1.374007462 podStartE2EDuration="4.065972195s" podCreationTimestamp="2025-12-12 14:18:12 +0000 UTC" firstStartedPulling="2025-12-12 14:18:13.031415861 +0000 UTC m=+872.456833904" lastFinishedPulling="2025-12-12 14:18:15.723380583 +0000 UTC m=+875.148798637" observedRunningTime="2025-12-12 14:18:16.063838008 +0000 UTC m=+875.489256062" watchObservedRunningTime="2025-12-12 14:18:16.065972195 +0000 UTC m=+875.491390250" Dec 12 14:18:22 crc kubenswrapper[4663]: I1212 14:18:22.679160 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:22 crc kubenswrapper[4663]: I1212 14:18:22.679381 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:22 crc kubenswrapper[4663]: I1212 14:18:22.707276 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:23 crc kubenswrapper[4663]: I1212 14:18:23.105734 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:23 crc kubenswrapper[4663]: I1212 14:18:23.132350 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.088294 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bss47" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="registry-server" containerID="cri-o://48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37" gracePeriod=2 Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.370436 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.570832 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zghc9\" (UniqueName: \"kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9\") pod \"cbefab3b-0492-47d5-b3fe-919babf1798c\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.570924 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities\") pod \"cbefab3b-0492-47d5-b3fe-919babf1798c\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.570948 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content\") pod \"cbefab3b-0492-47d5-b3fe-919babf1798c\" (UID: \"cbefab3b-0492-47d5-b3fe-919babf1798c\") " Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.571619 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities" (OuterVolumeSpecName: "utilities") pod "cbefab3b-0492-47d5-b3fe-919babf1798c" (UID: "cbefab3b-0492-47d5-b3fe-919babf1798c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.575062 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9" (OuterVolumeSpecName: "kube-api-access-zghc9") pod "cbefab3b-0492-47d5-b3fe-919babf1798c" (UID: "cbefab3b-0492-47d5-b3fe-919babf1798c"). InnerVolumeSpecName "kube-api-access-zghc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.585388 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cbefab3b-0492-47d5-b3fe-919babf1798c" (UID: "cbefab3b-0492-47d5-b3fe-919babf1798c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.672566 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.672592 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbefab3b-0492-47d5-b3fe-919babf1798c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:25 crc kubenswrapper[4663]: I1212 14:18:25.672602 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zghc9\" (UniqueName: \"kubernetes.io/projected/cbefab3b-0492-47d5-b3fe-919babf1798c-kube-api-access-zghc9\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.093167 4663 generic.go:334] "Generic (PLEG): container finished" podID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerID="48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37" exitCode=0 Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.093216 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bss47" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.093230 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerDied","Data":"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37"} Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.093474 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bss47" event={"ID":"cbefab3b-0492-47d5-b3fe-919babf1798c","Type":"ContainerDied","Data":"612fc2878eb6f9371ff92d6270d173d4c576a11336b91638c38bebad9a088684"} Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.093489 4663 scope.go:117] "RemoveContainer" containerID="48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.105887 4663 scope.go:117] "RemoveContainer" containerID="8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.112157 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.114391 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bss47"] Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.117441 4663 scope.go:117] "RemoveContainer" containerID="e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.130319 4663 scope.go:117] "RemoveContainer" containerID="48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37" Dec 12 14:18:26 crc kubenswrapper[4663]: E1212 14:18:26.130585 4663 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37\": container with ID starting with 48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37 not found: ID does not exist" containerID="48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.130611 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37"} err="failed to get container status \"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37\": rpc error: code = NotFound desc = could not find container \"48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37\": container with ID starting with 48d3cd0a41b50236fef18fb45aa12eb200610cd80f334d2bef6dfe14c438fb37 not found: ID does not exist" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.130632 4663 scope.go:117] "RemoveContainer" containerID="8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37" Dec 12 14:18:26 crc kubenswrapper[4663]: E1212 14:18:26.130857 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37\": container with ID starting with 8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37 not found: ID does not exist" containerID="8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.130890 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37"} err="failed to get container status \"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37\": rpc error: code = NotFound desc = could not find container \"8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37\": container with ID starting with 8465fb99cd7a90509beebbea20cd611f4f1fa2e27d5ae9229ca758f44f764d37 not found: ID does not exist" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.130904 4663 scope.go:117] "RemoveContainer" containerID="e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8" Dec 12 14:18:26 crc kubenswrapper[4663]: E1212 14:18:26.131099 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8\": container with ID starting with e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8 not found: ID does not exist" containerID="e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.131139 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8"} err="failed to get container status \"e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8\": rpc error: code = NotFound desc = could not find container \"e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8\": container with ID starting with e7dc10583f05b0ceca9571d605a6b643617ab43b08b4080ecae0e5c1e39675a8 not found: ID does not exist" Dec 12 14:18:26 crc kubenswrapper[4663]: I1212 14:18:26.875479 4663 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" path="/var/lib/kubelet/pods/cbefab3b-0492-47d5-b3fe-919babf1798c/volumes" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.404936 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:29 crc kubenswrapper[4663]: E1212 14:18:29.405429 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="extract-content" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.405449 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="extract-content" Dec 12 14:18:29 crc kubenswrapper[4663]: E1212 14:18:29.405468 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="registry-server" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.405474 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="registry-server" Dec 12 14:18:29 crc kubenswrapper[4663]: E1212 14:18:29.405480 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="extract-utilities" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.405485 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="extract-utilities" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.405583 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbefab3b-0492-47d5-b3fe-919babf1798c" containerName="registry-server" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.406290 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.412616 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.506191 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.506291 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.506312 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4v22\" (UniqueName: \"kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.607412 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.607630 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4v22\" (UniqueName: \"kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.607769 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.607832 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.608042 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.622623 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-t4v22\" (UniqueName: \"kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22\") pod \"redhat-operators-vndxt\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:29 crc kubenswrapper[4663]: I1212 14:18:29.717976 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:30 crc kubenswrapper[4663]: I1212 14:18:30.062275 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:30 crc kubenswrapper[4663]: W1212 14:18:30.065082 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd32033db_fc06_49f2_9ae7_6b7a4e84a398.slice/crio-3487d2f20ddf2906ea9af1f31f4a65796aae03e35ce4c9db63d52202d0f71e98 WatchSource:0}: Error finding container 3487d2f20ddf2906ea9af1f31f4a65796aae03e35ce4c9db63d52202d0f71e98: Status 404 returned error can't find the container with id 3487d2f20ddf2906ea9af1f31f4a65796aae03e35ce4c9db63d52202d0f71e98 Dec 12 14:18:30 crc kubenswrapper[4663]: I1212 14:18:30.110517 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerStarted","Data":"3487d2f20ddf2906ea9af1f31f4a65796aae03e35ce4c9db63d52202d0f71e98"} Dec 12 14:18:31 crc kubenswrapper[4663]: I1212 14:18:31.118441 4663 generic.go:334] "Generic (PLEG): container finished" podID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerID="3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6" exitCode=0 Dec 12 14:18:31 crc kubenswrapper[4663]: I1212 14:18:31.118485 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerDied","Data":"3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6"} Dec 12 14:18:32 crc kubenswrapper[4663]: I1212 14:18:32.124919 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerStarted","Data":"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946"} Dec 12 14:18:33 crc kubenswrapper[4663]: I1212 14:18:33.129956 4663 generic.go:334] "Generic (PLEG): container finished" podID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerID="a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946" exitCode=0 Dec 12 14:18:33 crc kubenswrapper[4663]: I1212 14:18:33.129992 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerDied","Data":"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946"} Dec 12 14:18:34 crc kubenswrapper[4663]: I1212 14:18:34.136968 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerStarted","Data":"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0"} Dec 12 14:18:34 crc kubenswrapper[4663]: I1212 14:18:34.150370 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vndxt" podStartSLOduration=2.5947701050000003 podStartE2EDuration="5.150357308s" 
podCreationTimestamp="2025-12-12 14:18:29 +0000 UTC" firstStartedPulling="2025-12-12 14:18:31.119478733 +0000 UTC m=+890.544896788" lastFinishedPulling="2025-12-12 14:18:33.675065936 +0000 UTC m=+893.100483991" observedRunningTime="2025-12-12 14:18:34.147358068 +0000 UTC m=+893.572776122" watchObservedRunningTime="2025-12-12 14:18:34.150357308 +0000 UTC m=+893.575775352" Dec 12 14:18:36 crc kubenswrapper[4663]: I1212 14:18:36.128400 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:18:36 crc kubenswrapper[4663]: I1212 14:18:36.128449 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:18:39 crc kubenswrapper[4663]: I1212 14:18:39.719086 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:39 crc kubenswrapper[4663]: I1212 14:18:39.719290 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:39 crc kubenswrapper[4663]: I1212 14:18:39.744844 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:40 crc kubenswrapper[4663]: I1212 14:18:40.188977 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:40 crc kubenswrapper[4663]: I1212 14:18:40.217988 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.167887 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vndxt" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="registry-server" containerID="cri-o://4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0" gracePeriod=2 Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.456487 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.634552 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content\") pod \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.634609 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4v22\" (UniqueName: \"kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22\") pod \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.634655 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities\") pod \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\" (UID: \"d32033db-fc06-49f2-9ae7-6b7a4e84a398\") " Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.635340 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities" (OuterVolumeSpecName: "utilities") pod "d32033db-fc06-49f2-9ae7-6b7a4e84a398" (UID: "d32033db-fc06-49f2-9ae7-6b7a4e84a398"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.639523 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22" (OuterVolumeSpecName: "kube-api-access-t4v22") pod "d32033db-fc06-49f2-9ae7-6b7a4e84a398" (UID: "d32033db-fc06-49f2-9ae7-6b7a4e84a398"). InnerVolumeSpecName "kube-api-access-t4v22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.735894 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4v22\" (UniqueName: \"kubernetes.io/projected/d32033db-fc06-49f2-9ae7-6b7a4e84a398-kube-api-access-t4v22\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:42 crc kubenswrapper[4663]: I1212 14:18:42.735919 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.173777 4663 generic.go:334] "Generic (PLEG): container finished" podID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerID="4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0" exitCode=0 Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.173812 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerDied","Data":"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0"} Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.173819 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vndxt" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.173833 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vndxt" event={"ID":"d32033db-fc06-49f2-9ae7-6b7a4e84a398","Type":"ContainerDied","Data":"3487d2f20ddf2906ea9af1f31f4a65796aae03e35ce4c9db63d52202d0f71e98"} Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.173843 4663 scope.go:117] "RemoveContainer" containerID="4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.187845 4663 scope.go:117] "RemoveContainer" containerID="a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.199113 4663 scope.go:117] "RemoveContainer" containerID="3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.211512 4663 scope.go:117] "RemoveContainer" containerID="4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0" Dec 12 14:18:43 crc kubenswrapper[4663]: E1212 14:18:43.211781 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0\": container with ID starting with 4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0 not found: ID does not exist" containerID="4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.211809 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0"} err="failed to get container status \"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0\": rpc error: code = NotFound desc = could not find container \"4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0\": container with ID starting with 4d2f9798518b8f5eb05332957bb5052f5e052d5df7ee7bb2676a744bd19dacd0 not found: ID does not exist" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.211828 4663 scope.go:117] "RemoveContainer" containerID="a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946" Dec 12 14:18:43 crc kubenswrapper[4663]: E1212 14:18:43.212103 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946\": container with ID starting with a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946 not found: ID does not exist" containerID="a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.212129 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946"} err="failed to get container status \"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946\": rpc error: code = NotFound desc = could not find container \"a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946\": container with ID starting with a606f50837b835b6c65f6cc7be34f5ad5f0c5a8a7b1e849a3c758afb268b7946 not found: ID does not exist" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.212146 4663 scope.go:117] "RemoveContainer" 
containerID="3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6" Dec 12 14:18:43 crc kubenswrapper[4663]: E1212 14:18:43.212363 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6\": container with ID starting with 3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6 not found: ID does not exist" containerID="3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.212387 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6"} err="failed to get container status \"3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6\": rpc error: code = NotFound desc = could not find container \"3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6\": container with ID starting with 3e16b72a3aa9b90f2f953e2819e850efccc8f9c08949e10988b3509e596c61d6 not found: ID does not exist" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.709308 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d32033db-fc06-49f2-9ae7-6b7a4e84a398" (UID: "d32033db-fc06-49f2-9ae7-6b7a4e84a398"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.746227 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d32033db-fc06-49f2-9ae7-6b7a4e84a398-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.791736 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:43 crc kubenswrapper[4663]: I1212 14:18:43.794961 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vndxt"] Dec 12 14:18:44 crc kubenswrapper[4663]: I1212 14:18:44.875459 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" path="/var/lib/kubelet/pods/d32033db-fc06-49f2-9ae7-6b7a4e84a398/volumes" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.248207 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:18:51 crc kubenswrapper[4663]: E1212 14:18:51.248688 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="extract-utilities" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.248700 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="extract-utilities" Dec 12 14:18:51 crc kubenswrapper[4663]: E1212 14:18:51.248714 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="extract-content" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.248719 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="extract-content" Dec 12 14:18:51 crc kubenswrapper[4663]: E1212 14:18:51.248730 4663 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="registry-server" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.248735 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="registry-server" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.248857 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="d32033db-fc06-49f2-9ae7-6b7a4e84a398" containerName="registry-server" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.249490 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.256022 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.421574 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.421804 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5dxp\" (UniqueName: \"kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.421912 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.522570 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.522767 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.522895 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5dxp\" (UniqueName: \"kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.522992 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities\") pod \"certified-operators-5brl7\" 
(UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.523054 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.545386 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5dxp\" (UniqueName: \"kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp\") pod \"certified-operators-5brl7\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.565221 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:18:51 crc kubenswrapper[4663]: I1212 14:18:51.927594 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:18:52 crc kubenswrapper[4663]: I1212 14:18:52.209793 4663 generic.go:334] "Generic (PLEG): container finished" podID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerID="0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0" exitCode=0 Dec 12 14:18:52 crc kubenswrapper[4663]: I1212 14:18:52.209829 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerDied","Data":"0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0"} Dec 12 14:18:52 crc kubenswrapper[4663]: I1212 14:18:52.209937 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerStarted","Data":"10775d3c98d0e4d5acedde1c5fdf4c26296ec78a406c812121c6607c6bbac4c4"} Dec 12 14:18:53 crc kubenswrapper[4663]: I1212 14:18:53.215672 4663 generic.go:334] "Generic (PLEG): container finished" podID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerID="2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1" exitCode=0 Dec 12 14:18:53 crc kubenswrapper[4663]: I1212 14:18:53.215783 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerDied","Data":"2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1"} Dec 12 14:18:54 crc kubenswrapper[4663]: I1212 14:18:54.222273 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerStarted","Data":"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360"} Dec 12 14:18:54 crc kubenswrapper[4663]: I1212 14:18:54.234574 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5brl7" podStartSLOduration=1.727608022 podStartE2EDuration="3.234559052s" podCreationTimestamp="2025-12-12 14:18:51 +0000 UTC" firstStartedPulling="2025-12-12 14:18:52.210621552 +0000 UTC m=+911.636039605" lastFinishedPulling="2025-12-12 14:18:53.717572582 +0000 UTC m=+913.142990635" 
observedRunningTime="2025-12-12 14:18:54.2331429 +0000 UTC m=+913.658560955" watchObservedRunningTime="2025-12-12 14:18:54.234559052 +0000 UTC m=+913.659977106" Dec 12 14:19:01 crc kubenswrapper[4663]: I1212 14:19:01.565644 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:01 crc kubenswrapper[4663]: I1212 14:19:01.566052 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:01 crc kubenswrapper[4663]: I1212 14:19:01.592523 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:02 crc kubenswrapper[4663]: I1212 14:19:02.278130 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:02 crc kubenswrapper[4663]: I1212 14:19:02.305103 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.260823 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5brl7" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="registry-server" containerID="cri-o://1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360" gracePeriod=2 Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.565283 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.765893 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content\") pod \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.766565 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities\") pod \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.766610 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5dxp\" (UniqueName: \"kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp\") pod \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\" (UID: \"a029f1a5-8c01-48c0-92f9-87058fb0bb24\") " Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.767281 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities" (OuterVolumeSpecName: "utilities") pod "a029f1a5-8c01-48c0-92f9-87058fb0bb24" (UID: "a029f1a5-8c01-48c0-92f9-87058fb0bb24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.771535 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp" (OuterVolumeSpecName: "kube-api-access-p5dxp") pod "a029f1a5-8c01-48c0-92f9-87058fb0bb24" (UID: "a029f1a5-8c01-48c0-92f9-87058fb0bb24"). 
InnerVolumeSpecName "kube-api-access-p5dxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.797702 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a029f1a5-8c01-48c0-92f9-87058fb0bb24" (UID: "a029f1a5-8c01-48c0-92f9-87058fb0bb24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.867736 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5dxp\" (UniqueName: \"kubernetes.io/projected/a029f1a5-8c01-48c0-92f9-87058fb0bb24-kube-api-access-p5dxp\") on node \"crc\" DevicePath \"\"" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.867779 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:19:04 crc kubenswrapper[4663]: I1212 14:19:04.867790 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a029f1a5-8c01-48c0-92f9-87058fb0bb24-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.267326 4663 generic.go:334] "Generic (PLEG): container finished" podID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerID="1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360" exitCode=0 Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.267397 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5brl7" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.267423 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerDied","Data":"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360"} Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.267689 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5brl7" event={"ID":"a029f1a5-8c01-48c0-92f9-87058fb0bb24","Type":"ContainerDied","Data":"10775d3c98d0e4d5acedde1c5fdf4c26296ec78a406c812121c6607c6bbac4c4"} Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.267713 4663 scope.go:117] "RemoveContainer" containerID="1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.281018 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.281501 4663 scope.go:117] "RemoveContainer" containerID="2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.284535 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5brl7"] Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.292885 4663 scope.go:117] "RemoveContainer" containerID="0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.306383 4663 scope.go:117] "RemoveContainer" containerID="1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360" Dec 12 14:19:05 crc 
kubenswrapper[4663]: E1212 14:19:05.306712 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360\": container with ID starting with 1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360 not found: ID does not exist" containerID="1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.306770 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360"} err="failed to get container status \"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360\": rpc error: code = NotFound desc = could not find container \"1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360\": container with ID starting with 1e5739a4c7929a288a4f9ae4a67904a18f74cc5451c9983338915c54ec962360 not found: ID does not exist" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.306797 4663 scope.go:117] "RemoveContainer" containerID="2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1" Dec 12 14:19:05 crc kubenswrapper[4663]: E1212 14:19:05.307301 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1\": container with ID starting with 2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1 not found: ID does not exist" containerID="2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.307335 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1"} err="failed to get container status \"2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1\": rpc error: code = NotFound desc = could not find container \"2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1\": container with ID starting with 2fdcb4f6e7439bd0c6a25737feeb32f38c0543f86f97d5195c208251d73b80d1 not found: ID does not exist" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.307355 4663 scope.go:117] "RemoveContainer" containerID="0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0" Dec 12 14:19:05 crc kubenswrapper[4663]: E1212 14:19:05.307683 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0\": container with ID starting with 0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0 not found: ID does not exist" containerID="0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0" Dec 12 14:19:05 crc kubenswrapper[4663]: I1212 14:19:05.307793 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0"} err="failed to get container status \"0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0\": rpc error: code = NotFound desc = could not find container \"0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0\": container with ID starting with 0e5349e1a6d183c73d0491d84562407bbe42e52a4eaf4c3bc6c106ab2d214bf0 not found: ID does not exist" Dec 12 14:19:06 crc kubenswrapper[4663]: 
I1212 14:19:06.128590 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.128634 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.128668 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.129071 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.129122 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2" gracePeriod=600 Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.286436 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2" exitCode=0 Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.286471 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2"} Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.286496 4663 scope.go:117] "RemoveContainer" containerID="5c5ebe9bf5e9a3e64cdedfa80ce55ee02c552fa8f4a03ac34da20964a3cfae7b" Dec 12 14:19:06 crc kubenswrapper[4663]: I1212 14:19:06.875273 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" path="/var/lib/kubelet/pods/a029f1a5-8c01-48c0-92f9-87058fb0bb24/volumes" Dec 12 14:19:07 crc kubenswrapper[4663]: I1212 14:19:07.291914 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93"} Dec 12 14:19:56 crc kubenswrapper[4663]: E1212 14:19:56.873443 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" 
image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:19:56 crc kubenswrapper[4663]: E1212 14:19:56.873865 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:19:56 crc kubenswrapper[4663]: E1212 14:19:56.873994 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:19:56 crc kubenswrapper[4663]: E1212 14:19:56.875147 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc 
= initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:20:11 crc kubenswrapper[4663]: E1212 14:20:11.871128 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:21:06 crc kubenswrapper[4663]: I1212 14:21:06.129005 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:21:06 crc kubenswrapper[4663]: I1212 14:21:06.129324 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:21:36 crc kubenswrapper[4663]: I1212 14:21:36.128471 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:21:36 crc kubenswrapper[4663]: I1212 14:21:36.128828 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.129150 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.129481 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.129524 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.130023 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93"} 
pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.130072 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93" gracePeriod=600 Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.998020 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93" exitCode=0 Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.998095 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93"} Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.998352 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f"} Dec 12 14:22:06 crc kubenswrapper[4663]: I1212 14:22:06.998372 4663 scope.go:117] "RemoveContainer" containerID="3587f0b453a98e1b08acc442413a0bc680bac151e3b0ee99db93fabf4dbe76f2" Dec 12 14:22:26 crc kubenswrapper[4663]: E1212 14:22:26.873188 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:22:26 crc kubenswrapper[4663]: E1212 14:22:26.874011 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:22:26 crc kubenswrapper[4663]: E1212 14:22:26.874141 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:22:26 crc kubenswrapper[4663]: E1212 14:22:26.875278 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:22:38 crc kubenswrapper[4663]: E1212 14:22:38.871167 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:22:53 crc kubenswrapper[4663]: E1212 14:22:53.871236 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" 
pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:23:04 crc kubenswrapper[4663]: E1212 14:23:04.871887 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:23:19 crc kubenswrapper[4663]: I1212 14:23:19.871419 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 12 14:24:06 crc kubenswrapper[4663]: I1212 14:24:06.128666 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:24:06 crc kubenswrapper[4663]: I1212 14:24:06.129065 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:24:36 crc kubenswrapper[4663]: I1212 14:24:36.128697 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:24:36 crc kubenswrapper[4663]: I1212 14:24:36.129074 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.128418 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.128734 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.128784 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.129252 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon 
failed liveness probe, will be restarted" Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.129303 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f" gracePeriod=600 Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.707141 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f" exitCode=0 Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.707207 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f"} Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.707347 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7"} Dec 12 14:25:06 crc kubenswrapper[4663]: I1212 14:25:06.707366 4663 scope.go:117] "RemoveContainer" containerID="f32ac463d452ff10f80a5cf12ae7a34d0051c4dad4fbbe040dcc24e3c7ae3a93" Dec 12 14:25:19 crc kubenswrapper[4663]: E1212 14:25:19.875641 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:25:19 crc kubenswrapper[4663]: E1212 14:25:19.876015 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:25:19 crc kubenswrapper[4663]: E1212 14:25:19.876124 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:25:19 crc kubenswrapper[4663]: E1212 14:25:19.877334 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:25:30 crc kubenswrapper[4663]: E1212 14:25:30.873725 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:25:43 crc kubenswrapper[4663]: E1212 14:25:43.871433 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:25:54 crc kubenswrapper[4663]: E1212 14:25:54.871623 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:26:06 crc kubenswrapper[4663]: E1212 14:26:06.871343 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:26:18 crc kubenswrapper[4663]: E1212 14:26:18.871685 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:26:29 crc kubenswrapper[4663]: E1212 14:26:29.871801 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:27:06 crc kubenswrapper[4663]: I1212 14:27:06.129182 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:27:06 crc kubenswrapper[4663]: I1212 14:27:06.129556 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:27:36 crc kubenswrapper[4663]: I1212 14:27:36.128945 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:27:36 crc kubenswrapper[4663]: I1212 14:27:36.129864 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.128580 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.128973 4663 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.129006 4663 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.129453 4663 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7"} pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.129510 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" containerID="cri-o://1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" gracePeriod=600 Dec 12 14:28:06 crc kubenswrapper[4663]: E1212 14:28:06.242685 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.427897 4663 generic.go:334] "Generic (PLEG): container finished" podID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" exitCode=0 Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.427931 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerDied","Data":"1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7"} Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.427966 4663 scope.go:117] "RemoveContainer" containerID="6961a96a7123f6768d9bea07019a8e9e91588749e591d7799d861526b3a8059f" Dec 12 14:28:06 crc kubenswrapper[4663]: I1212 14:28:06.428237 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:28:06 crc kubenswrapper[4663]: E1212 14:28:06.428419 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:17 crc kubenswrapper[4663]: I1212 14:28:17.870011 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:28:17 crc kubenswrapper[4663]: E1212 14:28:17.870484 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:28 crc kubenswrapper[4663]: I1212 14:28:28.870243 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:28:28 crc kubenswrapper[4663]: E1212 14:28:28.870708 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.875600 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:36 crc kubenswrapper[4663]: E1212 14:28:36.876053 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="extract-utilities" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876063 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="extract-utilities" Dec 12 14:28:36 crc kubenswrapper[4663]: E1212 14:28:36.876073 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="registry-server" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876079 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="registry-server" Dec 12 14:28:36 crc kubenswrapper[4663]: E1212 14:28:36.876092 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="extract-content" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876097 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="extract-content" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876186 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="a029f1a5-8c01-48c0-92f9-87058fb0bb24" containerName="registry-server" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876791 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.876869 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.981375 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.981603 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q84rm\" (UniqueName: \"kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:36 crc kubenswrapper[4663]: I1212 14:28:36.981824 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.083037 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.083113 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.083136 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q84rm\" (UniqueName: \"kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.083455 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.083500 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content\") pod \"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.098327 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q84rm\" (UniqueName: \"kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm\") pod 
\"community-operators-mwskg\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.189376 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.520395 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:37 crc kubenswrapper[4663]: I1212 14:28:37.553566 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerStarted","Data":"f8166b14237c5d1b115e9672b2cda57d4483b917fa7d4c0a335985f9fef37941"} Dec 12 14:28:38 crc kubenswrapper[4663]: I1212 14:28:38.558378 4663 generic.go:334] "Generic (PLEG): container finished" podID="4177bc28-113f-4609-98ba-924ee82f9cee" containerID="4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097" exitCode=0 Dec 12 14:28:38 crc kubenswrapper[4663]: I1212 14:28:38.558415 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerDied","Data":"4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097"} Dec 12 14:28:38 crc kubenswrapper[4663]: I1212 14:28:38.560264 4663 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 12 14:28:39 crc kubenswrapper[4663]: I1212 14:28:39.563637 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerStarted","Data":"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd"} Dec 12 14:28:40 crc kubenswrapper[4663]: I1212 14:28:40.569130 4663 generic.go:334] "Generic (PLEG): container finished" podID="4177bc28-113f-4609-98ba-924ee82f9cee" containerID="a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd" exitCode=0 Dec 12 14:28:40 crc kubenswrapper[4663]: I1212 14:28:40.569236 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerDied","Data":"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd"} Dec 12 14:28:40 crc kubenswrapper[4663]: I1212 14:28:40.872401 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:28:40 crc kubenswrapper[4663]: E1212 14:28:40.872580 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:40 crc kubenswrapper[4663]: E1212 14:28:40.877863 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: 
i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:28:40 crc kubenswrapper[4663]: E1212 14:28:40.877898 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:28:40 crc kubenswrapper[4663]: E1212 14:28:40.877996 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:28:40 crc kubenswrapper[4663]: E1212 14:28:40.879090 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = 
DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.575023 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerStarted","Data":"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99"} Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.589288 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mwskg" podStartSLOduration=2.942388695 podStartE2EDuration="5.589275299s" podCreationTimestamp="2025-12-12 14:28:36 +0000 UTC" firstStartedPulling="2025-12-12 14:28:38.559817198 +0000 UTC m=+1497.985235253" lastFinishedPulling="2025-12-12 14:28:41.206703802 +0000 UTC m=+1500.632121857" observedRunningTime="2025-12-12 14:28:41.58711347 +0000 UTC m=+1501.012531524" watchObservedRunningTime="2025-12-12 14:28:41.589275299 +0000 UTC m=+1501.014693342" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.660704 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.661685 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.670727 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.833820 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m2g5\" (UniqueName: \"kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.833935 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.833961 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.935111 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.935165 
4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.935216 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m2g5\" (UniqueName: \"kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.935532 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.935610 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.950539 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m2g5\" (UniqueName: \"kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5\") pod \"redhat-operators-tszgs\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:41 crc kubenswrapper[4663]: I1212 14:28:41.973364 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:42 crc kubenswrapper[4663]: I1212 14:28:42.324654 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:42 crc kubenswrapper[4663]: W1212 14:28:42.329204 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29f7e10a_a70f_4799_946b_b41e8c69eb3c.slice/crio-ddb0a3cd11099405301728f0befc1d52c77b159ff16b9ad25de64377cf923ca9 WatchSource:0}: Error finding container ddb0a3cd11099405301728f0befc1d52c77b159ff16b9ad25de64377cf923ca9: Status 404 returned error can't find the container with id ddb0a3cd11099405301728f0befc1d52c77b159ff16b9ad25de64377cf923ca9 Dec 12 14:28:42 crc kubenswrapper[4663]: I1212 14:28:42.580927 4663 generic.go:334] "Generic (PLEG): container finished" podID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerID="1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7" exitCode=0 Dec 12 14:28:42 crc kubenswrapper[4663]: I1212 14:28:42.580986 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerDied","Data":"1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7"} Dec 12 14:28:42 crc kubenswrapper[4663]: I1212 14:28:42.581165 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerStarted","Data":"ddb0a3cd11099405301728f0befc1d52c77b159ff16b9ad25de64377cf923ca9"} Dec 12 14:28:43 crc kubenswrapper[4663]: I1212 14:28:43.586802 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerStarted","Data":"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160"} Dec 12 14:28:44 crc kubenswrapper[4663]: I1212 14:28:44.594647 4663 generic.go:334] "Generic (PLEG): container finished" podID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerID="e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160" exitCode=0 Dec 12 14:28:44 crc kubenswrapper[4663]: I1212 14:28:44.594795 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerDied","Data":"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160"} Dec 12 14:28:45 crc kubenswrapper[4663]: I1212 14:28:45.601920 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerStarted","Data":"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5"} Dec 12 14:28:45 crc kubenswrapper[4663]: I1212 14:28:45.615126 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tszgs" podStartSLOduration=2.088457934 podStartE2EDuration="4.615113644s" podCreationTimestamp="2025-12-12 14:28:41 +0000 UTC" firstStartedPulling="2025-12-12 14:28:42.582074148 +0000 UTC m=+1502.007492202" lastFinishedPulling="2025-12-12 14:28:45.108729859 +0000 UTC m=+1504.534147912" observedRunningTime="2025-12-12 14:28:45.61239145 +0000 UTC m=+1505.037809503" watchObservedRunningTime="2025-12-12 14:28:45.615113644 +0000 UTC m=+1505.040531699" Dec 12 14:28:47 crc 
kubenswrapper[4663]: I1212 14:28:47.189924 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:47 crc kubenswrapper[4663]: I1212 14:28:47.189975 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:47 crc kubenswrapper[4663]: I1212 14:28:47.217271 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:47 crc kubenswrapper[4663]: I1212 14:28:47.635888 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:48 crc kubenswrapper[4663]: I1212 14:28:48.455562 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.618127 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mwskg" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="registry-server" containerID="cri-o://12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99" gracePeriod=2 Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.900463 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.922503 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q84rm\" (UniqueName: \"kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm\") pod \"4177bc28-113f-4609-98ba-924ee82f9cee\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.922556 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content\") pod \"4177bc28-113f-4609-98ba-924ee82f9cee\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.922599 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities\") pod \"4177bc28-113f-4609-98ba-924ee82f9cee\" (UID: \"4177bc28-113f-4609-98ba-924ee82f9cee\") " Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.923331 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities" (OuterVolumeSpecName: "utilities") pod "4177bc28-113f-4609-98ba-924ee82f9cee" (UID: "4177bc28-113f-4609-98ba-924ee82f9cee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.933017 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm" (OuterVolumeSpecName: "kube-api-access-q84rm") pod "4177bc28-113f-4609-98ba-924ee82f9cee" (UID: "4177bc28-113f-4609-98ba-924ee82f9cee"). InnerVolumeSpecName "kube-api-access-q84rm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:28:49 crc kubenswrapper[4663]: I1212 14:28:49.961872 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4177bc28-113f-4609-98ba-924ee82f9cee" (UID: "4177bc28-113f-4609-98ba-924ee82f9cee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.023434 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q84rm\" (UniqueName: \"kubernetes.io/projected/4177bc28-113f-4609-98ba-924ee82f9cee-kube-api-access-q84rm\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.023461 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.023471 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4177bc28-113f-4609-98ba-924ee82f9cee-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.624176 4663 generic.go:334] "Generic (PLEG): container finished" podID="4177bc28-113f-4609-98ba-924ee82f9cee" containerID="12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99" exitCode=0 Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.624215 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerDied","Data":"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99"} Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.624223 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwskg" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.624248 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwskg" event={"ID":"4177bc28-113f-4609-98ba-924ee82f9cee","Type":"ContainerDied","Data":"f8166b14237c5d1b115e9672b2cda57d4483b917fa7d4c0a335985f9fef37941"} Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.624266 4663 scope.go:117] "RemoveContainer" containerID="12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.636965 4663 scope.go:117] "RemoveContainer" containerID="a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.643138 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.645827 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mwskg"] Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.650777 4663 scope.go:117] "RemoveContainer" containerID="4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.679098 4663 scope.go:117] "RemoveContainer" containerID="12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99" Dec 12 14:28:50 crc kubenswrapper[4663]: E1212 14:28:50.679514 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99\": container with ID starting with 12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99 not found: ID does not exist" containerID="12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.679550 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99"} err="failed to get container status \"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99\": rpc error: code = NotFound desc = could not find container \"12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99\": container with ID starting with 12aad4c5e62d1c2944d95f221a4e6e647a99f0c710a1d294cc1e9996e08b6d99 not found: ID does not exist" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.679573 4663 scope.go:117] "RemoveContainer" containerID="a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd" Dec 12 14:28:50 crc kubenswrapper[4663]: E1212 14:28:50.679997 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd\": container with ID starting with a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd not found: ID does not exist" containerID="a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.680019 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd"} err="failed to get container status \"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd\": rpc error: code = NotFound desc = could not find 
container \"a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd\": container with ID starting with a857b177242ebe516ff73192ba17134e484ed65d8f98b3b19b5b1e9440debcdd not found: ID does not exist" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.680035 4663 scope.go:117] "RemoveContainer" containerID="4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097" Dec 12 14:28:50 crc kubenswrapper[4663]: E1212 14:28:50.680315 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097\": container with ID starting with 4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097 not found: ID does not exist" containerID="4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.680341 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097"} err="failed to get container status \"4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097\": rpc error: code = NotFound desc = could not find container \"4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097\": container with ID starting with 4ec94c586fa38148de46531ccfdbcaf87fc2cd909035615cd0c1a9c548495097 not found: ID does not exist" Dec 12 14:28:50 crc kubenswrapper[4663]: I1212 14:28:50.878945 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" path="/var/lib/kubelet/pods/4177bc28-113f-4609-98ba-924ee82f9cee/volumes" Dec 12 14:28:51 crc kubenswrapper[4663]: E1212 14:28:51.871789 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:28:51 crc kubenswrapper[4663]: I1212 14:28:51.974147 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:51 crc kubenswrapper[4663]: I1212 14:28:51.974187 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:52 crc kubenswrapper[4663]: I1212 14:28:52.001927 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:52 crc kubenswrapper[4663]: I1212 14:28:52.660261 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:52 crc kubenswrapper[4663]: I1212 14:28:52.870127 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:28:52 crc kubenswrapper[4663]: E1212 14:28:52.870336 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" 
podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:28:53 crc kubenswrapper[4663]: I1212 14:28:53.454361 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:54 crc kubenswrapper[4663]: I1212 14:28:54.641650 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tszgs" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="registry-server" containerID="cri-o://c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5" gracePeriod=2 Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.143218 4663 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29f7e10a_a70f_4799_946b_b41e8c69eb3c.slice/crio-conmon-c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5.scope\": RecentStats: unable to find data in memory cache]" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.257107 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.257334 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="registry-server" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.257352 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="registry-server" Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.257361 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="extract-content" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.257367 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="extract-content" Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.257380 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="extract-utilities" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.257385 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="extract-utilities" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.257482 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="4177bc28-113f-4609-98ba-924ee82f9cee" containerName="registry-server" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.258151 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.271426 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.376651 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.376720 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.376848 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc5ht\" (UniqueName: \"kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.437470 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.478286 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.478342 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.478369 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc5ht\" (UniqueName: \"kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.478724 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.478789 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities\") pod \"redhat-marketplace-8xn78\" (UID: 
\"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.493687 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc5ht\" (UniqueName: \"kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht\") pod \"redhat-marketplace-8xn78\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.571972 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.579332 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities\") pod \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.579421 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content\") pod \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.579461 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m2g5\" (UniqueName: \"kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5\") pod \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\" (UID: \"29f7e10a-a70f-4799-946b-b41e8c69eb3c\") " Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.580141 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities" (OuterVolumeSpecName: "utilities") pod "29f7e10a-a70f-4799-946b-b41e8c69eb3c" (UID: "29f7e10a-a70f-4799-946b-b41e8c69eb3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.582109 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5" (OuterVolumeSpecName: "kube-api-access-2m2g5") pod "29f7e10a-a70f-4799-946b-b41e8c69eb3c" (UID: "29f7e10a-a70f-4799-946b-b41e8c69eb3c"). InnerVolumeSpecName "kube-api-access-2m2g5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.649825 4663 generic.go:334] "Generic (PLEG): container finished" podID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerID="c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5" exitCode=0 Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.649856 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerDied","Data":"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5"} Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.649881 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tszgs" event={"ID":"29f7e10a-a70f-4799-946b-b41e8c69eb3c","Type":"ContainerDied","Data":"ddb0a3cd11099405301728f0befc1d52c77b159ff16b9ad25de64377cf923ca9"} Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.649896 4663 scope.go:117] "RemoveContainer" containerID="c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.649986 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tszgs" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.668901 4663 scope.go:117] "RemoveContainer" containerID="e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.671449 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29f7e10a-a70f-4799-946b-b41e8c69eb3c" (UID: "29f7e10a-a70f-4799-946b-b41e8c69eb3c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.680370 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.680396 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29f7e10a-a70f-4799-946b-b41e8c69eb3c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.680409 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m2g5\" (UniqueName: \"kubernetes.io/projected/29f7e10a-a70f-4799-946b-b41e8c69eb3c-kube-api-access-2m2g5\") on node \"crc\" DevicePath \"\"" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.684158 4663 scope.go:117] "RemoveContainer" containerID="1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.717273 4663 scope.go:117] "RemoveContainer" containerID="c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5" Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.717687 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5\": container with ID starting with c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5 not found: ID does not exist" containerID="c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.717715 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5"} err="failed to get container status \"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5\": rpc error: code = NotFound desc = could not find container \"c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5\": container with ID starting with c556c5926b88725aed72db98131bd0ce1f428eb96f4be3c1429a50afb07907d5 not found: ID does not exist" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.717735 4663 scope.go:117] "RemoveContainer" containerID="e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160" Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.718054 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160\": container with ID starting with e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160 not found: ID does not exist" containerID="e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.718074 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160"} err="failed to get container status \"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160\": rpc error: code = NotFound desc = could not find container \"e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160\": container with ID starting with e7d7cb96ab07a3f673f1b9448fc4eb1da6d75fd5a34f86c648cf903975a9e160 not found: ID does not exist" Dec 12 14:28:55 crc 
kubenswrapper[4663]: I1212 14:28:55.718088 4663 scope.go:117] "RemoveContainer" containerID="1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7" Dec 12 14:28:55 crc kubenswrapper[4663]: E1212 14:28:55.718365 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7\": container with ID starting with 1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7 not found: ID does not exist" containerID="1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.718406 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7"} err="failed to get container status \"1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7\": rpc error: code = NotFound desc = could not find container \"1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7\": container with ID starting with 1d5fbc52a45be5070ca78d7cffeb73b1cc96931557904250d6244e0992d5bbf7 not found: ID does not exist" Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.725850 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.970849 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:55 crc kubenswrapper[4663]: I1212 14:28:55.973413 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tszgs"] Dec 12 14:28:56 crc kubenswrapper[4663]: I1212 14:28:56.654874 4663 generic.go:334] "Generic (PLEG): container finished" podID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerID="3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1" exitCode=0 Dec 12 14:28:56 crc kubenswrapper[4663]: I1212 14:28:56.654942 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerDied","Data":"3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1"} Dec 12 14:28:56 crc kubenswrapper[4663]: I1212 14:28:56.654968 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerStarted","Data":"59113d54c9aad5b76549c287d7d24cce64ad5839b4d1423c55c65229cbcd64ae"} Dec 12 14:28:56 crc kubenswrapper[4663]: I1212 14:28:56.875464 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" path="/var/lib/kubelet/pods/29f7e10a-a70f-4799-946b-b41e8c69eb3c/volumes" Dec 12 14:28:58 crc kubenswrapper[4663]: I1212 14:28:58.665382 4663 generic.go:334] "Generic (PLEG): container finished" podID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerID="82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4" exitCode=0 Dec 12 14:28:58 crc kubenswrapper[4663]: I1212 14:28:58.665465 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerDied","Data":"82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4"} Dec 12 14:28:59 crc kubenswrapper[4663]: I1212 14:28:59.671104 4663 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerStarted","Data":"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2"} Dec 12 14:28:59 crc kubenswrapper[4663]: I1212 14:28:59.684501 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8xn78" podStartSLOduration=2.16642254 podStartE2EDuration="4.684486132s" podCreationTimestamp="2025-12-12 14:28:55 +0000 UTC" firstStartedPulling="2025-12-12 14:28:56.656155659 +0000 UTC m=+1516.081573713" lastFinishedPulling="2025-12-12 14:28:59.17421925 +0000 UTC m=+1518.599637305" observedRunningTime="2025-12-12 14:28:59.681662245 +0000 UTC m=+1519.107080298" watchObservedRunningTime="2025-12-12 14:28:59.684486132 +0000 UTC m=+1519.109904185" Dec 12 14:29:05 crc kubenswrapper[4663]: I1212 14:29:05.572940 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:05 crc kubenswrapper[4663]: I1212 14:29:05.573276 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:05 crc kubenswrapper[4663]: I1212 14:29:05.602874 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:05 crc kubenswrapper[4663]: I1212 14:29:05.720349 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:05 crc kubenswrapper[4663]: I1212 14:29:05.828166 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:29:05 crc kubenswrapper[4663]: E1212 14:29:05.870948 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:29:06 crc kubenswrapper[4663]: I1212 14:29:06.869805 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:29:06 crc kubenswrapper[4663]: E1212 14:29:06.870195 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:29:07 crc kubenswrapper[4663]: I1212 14:29:07.702291 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8xn78" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="registry-server" containerID="cri-o://5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2" gracePeriod=2 Dec 12 14:29:07 crc kubenswrapper[4663]: I1212 14:29:07.994737 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.014908 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc5ht\" (UniqueName: \"kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht\") pod \"2f2b83d8-b1f2-4bae-b550-36adc221e145\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.020730 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht" (OuterVolumeSpecName: "kube-api-access-lc5ht") pod "2f2b83d8-b1f2-4bae-b550-36adc221e145" (UID: "2f2b83d8-b1f2-4bae-b550-36adc221e145"). InnerVolumeSpecName "kube-api-access-lc5ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.115678 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content\") pod \"2f2b83d8-b1f2-4bae-b550-36adc221e145\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.115731 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities\") pod \"2f2b83d8-b1f2-4bae-b550-36adc221e145\" (UID: \"2f2b83d8-b1f2-4bae-b550-36adc221e145\") " Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.116062 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc5ht\" (UniqueName: \"kubernetes.io/projected/2f2b83d8-b1f2-4bae-b550-36adc221e145-kube-api-access-lc5ht\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.116363 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities" (OuterVolumeSpecName: "utilities") pod "2f2b83d8-b1f2-4bae-b550-36adc221e145" (UID: "2f2b83d8-b1f2-4bae-b550-36adc221e145"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.130729 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f2b83d8-b1f2-4bae-b550-36adc221e145" (UID: "2f2b83d8-b1f2-4bae-b550-36adc221e145"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.216815 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.216838 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f2b83d8-b1f2-4bae-b550-36adc221e145-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.708127 4663 generic.go:334] "Generic (PLEG): container finished" podID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerID="5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2" exitCode=0 Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.708162 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerDied","Data":"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2"} Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.708183 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8xn78" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.708195 4663 scope.go:117] "RemoveContainer" containerID="5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.708185 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8xn78" event={"ID":"2f2b83d8-b1f2-4bae-b550-36adc221e145","Type":"ContainerDied","Data":"59113d54c9aad5b76549c287d7d24cce64ad5839b4d1423c55c65229cbcd64ae"} Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.722132 4663 scope.go:117] "RemoveContainer" containerID="82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.762131 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.762176 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8xn78"] Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.771884 4663 scope.go:117] "RemoveContainer" containerID="3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.786739 4663 scope.go:117] "RemoveContainer" containerID="5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2" Dec 12 14:29:08 crc kubenswrapper[4663]: E1212 14:29:08.787077 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2\": container with ID starting with 5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2 not found: ID does not exist" containerID="5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.787115 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2"} err="failed to get container status \"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2\": rpc error: 
code = NotFound desc = could not find container \"5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2\": container with ID starting with 5341f11530fe873902775c95ac61c5817d82c2d512248f0b1f84a6aff5add4f2 not found: ID does not exist" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.787143 4663 scope.go:117] "RemoveContainer" containerID="82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4" Dec 12 14:29:08 crc kubenswrapper[4663]: E1212 14:29:08.787457 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4\": container with ID starting with 82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4 not found: ID does not exist" containerID="82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.787505 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4"} err="failed to get container status \"82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4\": rpc error: code = NotFound desc = could not find container \"82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4\": container with ID starting with 82da7f4eac3974656e5daf531984113a34ee12ea0b959971bec008510283d5d4 not found: ID does not exist" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.787529 4663 scope.go:117] "RemoveContainer" containerID="3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1" Dec 12 14:29:08 crc kubenswrapper[4663]: E1212 14:29:08.787984 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1\": container with ID starting with 3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1 not found: ID does not exist" containerID="3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.788010 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1"} err="failed to get container status \"3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1\": rpc error: code = NotFound desc = could not find container \"3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1\": container with ID starting with 3181a4efc0d6a5d99ea5fa2e3c034edd34eea60c4f6ef6021b588670bb271ca1 not found: ID does not exist" Dec 12 14:29:08 crc kubenswrapper[4663]: I1212 14:29:08.875307 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" path="/var/lib/kubelet/pods/2f2b83d8-b1f2-4bae-b550-36adc221e145/volumes" Dec 12 14:29:17 crc kubenswrapper[4663]: E1212 14:29:17.871614 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:29:20 crc kubenswrapper[4663]: I1212 14:29:20.872189 4663 scope.go:117] "RemoveContainer" 
containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:29:20 crc kubenswrapper[4663]: E1212 14:29:20.872533 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.869610 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.870201 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.871027 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963596 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963790 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="extract-content" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963805 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="extract-content" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963813 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="extract-utilities" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963820 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="extract-utilities" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963828 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="extract-utilities" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963833 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="extract-utilities" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963846 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="extract-content" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963851 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="extract-content" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963859 4663 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963864 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: E1212 14:29:32.963873 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963878 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963966 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f2b83d8-b1f2-4bae-b550-36adc221e145" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.963975 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="29f7e10a-a70f-4799-946b-b41e8c69eb3c" containerName="registry-server" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.964583 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.971568 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.975728 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v5sm\" (UniqueName: \"kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.975790 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:32 crc kubenswrapper[4663]: I1212 14:29:32.975872 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.076977 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v5sm\" (UniqueName: \"kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.077038 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc 
kubenswrapper[4663]: I1212 14:29:33.077116 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.077490 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.077519 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.092475 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v5sm\" (UniqueName: \"kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm\") pod \"certified-operators-tbgrf\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.278574 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.670175 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.812859 4663 generic.go:334] "Generic (PLEG): container finished" podID="de7b714c-3573-4467-826f-88c6bd7cf129" containerID="b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef" exitCode=0 Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.812913 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerDied","Data":"b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef"} Dec 12 14:29:33 crc kubenswrapper[4663]: I1212 14:29:33.813042 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerStarted","Data":"ad075bfef1a90e0ddb2fd40a0e9b894b0135209b6b960e749019edf026fe5fbd"} Dec 12 14:29:34 crc kubenswrapper[4663]: I1212 14:29:34.818324 4663 generic.go:334] "Generic (PLEG): container finished" podID="de7b714c-3573-4467-826f-88c6bd7cf129" containerID="8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa" exitCode=0 Dec 12 14:29:34 crc kubenswrapper[4663]: I1212 14:29:34.818365 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerDied","Data":"8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa"} Dec 12 14:29:35 crc kubenswrapper[4663]: I1212 14:29:35.824131 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" 
event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerStarted","Data":"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366"} Dec 12 14:29:35 crc kubenswrapper[4663]: I1212 14:29:35.838863 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tbgrf" podStartSLOduration=2.048307205 podStartE2EDuration="3.838849952s" podCreationTimestamp="2025-12-12 14:29:32 +0000 UTC" firstStartedPulling="2025-12-12 14:29:33.813833605 +0000 UTC m=+1553.239251659" lastFinishedPulling="2025-12-12 14:29:35.604376352 +0000 UTC m=+1555.029794406" observedRunningTime="2025-12-12 14:29:35.834915428 +0000 UTC m=+1555.260333482" watchObservedRunningTime="2025-12-12 14:29:35.838849952 +0000 UTC m=+1555.264268006" Dec 12 14:29:43 crc kubenswrapper[4663]: I1212 14:29:43.279762 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:43 crc kubenswrapper[4663]: I1212 14:29:43.280148 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:43 crc kubenswrapper[4663]: I1212 14:29:43.308431 4663 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:43 crc kubenswrapper[4663]: E1212 14:29:43.870603 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:29:43 crc kubenswrapper[4663]: I1212 14:29:43.881919 4663 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:43 crc kubenswrapper[4663]: I1212 14:29:43.909165 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:45 crc kubenswrapper[4663]: I1212 14:29:45.864703 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tbgrf" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="registry-server" containerID="cri-o://78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366" gracePeriod=2 Dec 12 14:29:45 crc kubenswrapper[4663]: I1212 14:29:45.869863 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:29:45 crc kubenswrapper[4663]: E1212 14:29:45.870042 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.151178 4663 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.219418 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content\") pod \"de7b714c-3573-4467-826f-88c6bd7cf129\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.219487 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities\") pod \"de7b714c-3573-4467-826f-88c6bd7cf129\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.219528 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6v5sm\" (UniqueName: \"kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm\") pod \"de7b714c-3573-4467-826f-88c6bd7cf129\" (UID: \"de7b714c-3573-4467-826f-88c6bd7cf129\") " Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.220110 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities" (OuterVolumeSpecName: "utilities") pod "de7b714c-3573-4467-826f-88c6bd7cf129" (UID: "de7b714c-3573-4467-826f-88c6bd7cf129"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.223379 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm" (OuterVolumeSpecName: "kube-api-access-6v5sm") pod "de7b714c-3573-4467-826f-88c6bd7cf129" (UID: "de7b714c-3573-4467-826f-88c6bd7cf129"). InnerVolumeSpecName "kube-api-access-6v5sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.254221 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de7b714c-3573-4467-826f-88c6bd7cf129" (UID: "de7b714c-3573-4467-826f-88c6bd7cf129"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.321021 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6v5sm\" (UniqueName: \"kubernetes.io/projected/de7b714c-3573-4467-826f-88c6bd7cf129-kube-api-access-6v5sm\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.321050 4663 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.321059 4663 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b714c-3573-4467-826f-88c6bd7cf129-utilities\") on node \"crc\" DevicePath \"\"" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.870704 4663 generic.go:334] "Generic (PLEG): container finished" podID="de7b714c-3573-4467-826f-88c6bd7cf129" containerID="78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366" exitCode=0 Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.870800 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tbgrf" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.873837 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerDied","Data":"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366"} Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.873872 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tbgrf" event={"ID":"de7b714c-3573-4467-826f-88c6bd7cf129","Type":"ContainerDied","Data":"ad075bfef1a90e0ddb2fd40a0e9b894b0135209b6b960e749019edf026fe5fbd"} Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.873888 4663 scope.go:117] "RemoveContainer" containerID="78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.885686 4663 scope.go:117] "RemoveContainer" containerID="8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.892505 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.896172 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tbgrf"] Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.896636 4663 scope.go:117] "RemoveContainer" containerID="b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.910693 4663 scope.go:117] "RemoveContainer" containerID="78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366" Dec 12 14:29:46 crc kubenswrapper[4663]: E1212 14:29:46.911046 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366\": container with ID starting with 78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366 not found: ID does not exist" containerID="78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.911073 
4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366"} err="failed to get container status \"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366\": rpc error: code = NotFound desc = could not find container \"78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366\": container with ID starting with 78f29976c2ef28cfed35790d467abcc6cddbaae4e32cb144043b1156494ec366 not found: ID does not exist" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.911090 4663 scope.go:117] "RemoveContainer" containerID="8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa" Dec 12 14:29:46 crc kubenswrapper[4663]: E1212 14:29:46.911275 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa\": container with ID starting with 8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa not found: ID does not exist" containerID="8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.911297 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa"} err="failed to get container status \"8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa\": rpc error: code = NotFound desc = could not find container \"8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa\": container with ID starting with 8c4755c15f5e5adbf7c79e5825619e87a038ad4986e2f65be3ea726851c9e5aa not found: ID does not exist" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.911311 4663 scope.go:117] "RemoveContainer" containerID="b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef" Dec 12 14:29:46 crc kubenswrapper[4663]: E1212 14:29:46.911759 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef\": container with ID starting with b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef not found: ID does not exist" containerID="b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef" Dec 12 14:29:46 crc kubenswrapper[4663]: I1212 14:29:46.911781 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef"} err="failed to get container status \"b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef\": rpc error: code = NotFound desc = could not find container \"b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef\": container with ID starting with b9739523979b4ec3cbcb483618289b22a73c58bbf392afc090eaa2015f073aef not found: ID does not exist" Dec 12 14:29:48 crc kubenswrapper[4663]: I1212 14:29:48.875186 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" path="/var/lib/kubelet/pods/de7b714c-3573-4467-826f-88c6bd7cf129/volumes" Dec 12 14:29:55 crc kubenswrapper[4663]: E1212 14:29:55.870874 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:29:57 crc kubenswrapper[4663]: I1212 14:29:57.870293 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:29:57 crc kubenswrapper[4663]: E1212 14:29:57.870472 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.124180 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn"] Dec 12 14:30:00 crc kubenswrapper[4663]: E1212 14:30:00.124537 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="extract-content" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.124547 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="extract-content" Dec 12 14:30:00 crc kubenswrapper[4663]: E1212 14:30:00.124566 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="registry-server" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.124572 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="registry-server" Dec 12 14:30:00 crc kubenswrapper[4663]: E1212 14:30:00.124581 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="extract-utilities" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.124586 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="extract-utilities" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.124666 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7b714c-3573-4467-826f-88c6bd7cf129" containerName="registry-server" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.125055 4663 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.126606 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.126625 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.128884 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn"] Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.161368 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.161440 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hfwg\" (UniqueName: \"kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.161464 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.262406 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hfwg\" (UniqueName: \"kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.262441 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.262498 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.263314 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume\") pod 
\"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.266359 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.274840 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hfwg\" (UniqueName: \"kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg\") pod \"collect-profiles-29425830-ndqfn\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.436822 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.763253 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn"] Dec 12 14:30:00 crc kubenswrapper[4663]: W1212 14:30:00.766271 4663 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cb2e2dd_8e87_42b0_aab1_6980cacb828e.slice/crio-cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10 WatchSource:0}: Error finding container cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10: Status 404 returned error can't find the container with id cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10 Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.925830 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" event={"ID":"9cb2e2dd-8e87-42b0-aab1-6980cacb828e","Type":"ContainerStarted","Data":"cc9e725a40a4fce462447cdb7a891b99e80c2d838ebf8c3fe369066f6462deb0"} Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.925882 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" event={"ID":"9cb2e2dd-8e87-42b0-aab1-6980cacb828e","Type":"ContainerStarted","Data":"cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10"} Dec 12 14:30:00 crc kubenswrapper[4663]: I1212 14:30:00.937333 4663 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" podStartSLOduration=0.937320434 podStartE2EDuration="937.320434ms" podCreationTimestamp="2025-12-12 14:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-12 14:30:00.935828359 +0000 UTC m=+1580.361246414" watchObservedRunningTime="2025-12-12 14:30:00.937320434 +0000 UTC m=+1580.362738488" Dec 12 14:30:01 crc kubenswrapper[4663]: I1212 14:30:01.931329 4663 generic.go:334] "Generic (PLEG): container finished" podID="9cb2e2dd-8e87-42b0-aab1-6980cacb828e" containerID="cc9e725a40a4fce462447cdb7a891b99e80c2d838ebf8c3fe369066f6462deb0" exitCode=0 Dec 12 14:30:01 crc kubenswrapper[4663]: I1212 14:30:01.931364 
4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" event={"ID":"9cb2e2dd-8e87-42b0-aab1-6980cacb828e","Type":"ContainerDied","Data":"cc9e725a40a4fce462447cdb7a891b99e80c2d838ebf8c3fe369066f6462deb0"} Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.103925 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.188128 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume\") pod \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.188243 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume\") pod \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.188274 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hfwg\" (UniqueName: \"kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg\") pod \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\" (UID: \"9cb2e2dd-8e87-42b0-aab1-6980cacb828e\") " Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.188515 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume" (OuterVolumeSpecName: "config-volume") pod "9cb2e2dd-8e87-42b0-aab1-6980cacb828e" (UID: "9cb2e2dd-8e87-42b0-aab1-6980cacb828e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.192345 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9cb2e2dd-8e87-42b0-aab1-6980cacb828e" (UID: "9cb2e2dd-8e87-42b0-aab1-6980cacb828e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.192424 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg" (OuterVolumeSpecName: "kube-api-access-6hfwg") pod "9cb2e2dd-8e87-42b0-aab1-6980cacb828e" (UID: "9cb2e2dd-8e87-42b0-aab1-6980cacb828e"). InnerVolumeSpecName "kube-api-access-6hfwg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.289253 4663 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.289281 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hfwg\" (UniqueName: \"kubernetes.io/projected/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-kube-api-access-6hfwg\") on node \"crc\" DevicePath \"\"" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.289290 4663 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9cb2e2dd-8e87-42b0-aab1-6980cacb828e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.940468 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" event={"ID":"9cb2e2dd-8e87-42b0-aab1-6980cacb828e","Type":"ContainerDied","Data":"cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10"} Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.940513 4663 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc5c2a3146fe71eacd1b27969e4c96eec7ab4f2728032e0e87611fe9b1f78c10" Dec 12 14:30:03 crc kubenswrapper[4663]: I1212 14:30:03.940557 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29425830-ndqfn" Dec 12 14:30:06 crc kubenswrapper[4663]: E1212 14:30:06.871739 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:30:09 crc kubenswrapper[4663]: I1212 14:30:09.869591 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:30:09 crc kubenswrapper[4663]: E1212 14:30:09.870040 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:30:18 crc kubenswrapper[4663]: E1212 14:30:18.871839 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:30:23 crc kubenswrapper[4663]: I1212 14:30:23.869984 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:30:23 crc kubenswrapper[4663]: E1212 14:30:23.870316 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:30:31 crc kubenswrapper[4663]: E1212 14:30:31.871533 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:30:34 crc kubenswrapper[4663]: I1212 14:30:34.869805 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:30:34 crc kubenswrapper[4663]: E1212 14:30:34.870145 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:30:44 crc kubenswrapper[4663]: E1212 14:30:44.871433 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:30:47 crc kubenswrapper[4663]: I1212 14:30:47.869906 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:30:47 crc kubenswrapper[4663]: E1212 14:30:47.870270 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:30:55 crc kubenswrapper[4663]: E1212 14:30:55.871453 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:31:02 crc kubenswrapper[4663]: I1212 14:31:02.869596 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:31:02 crc kubenswrapper[4663]: E1212 14:31:02.870152 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.438454 4663 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-2xdq7/must-gather-8hwvk"] Dec 12 14:31:05 crc kubenswrapper[4663]: E1212 14:31:05.438853 4663 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cb2e2dd-8e87-42b0-aab1-6980cacb828e" containerName="collect-profiles" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.438864 4663 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cb2e2dd-8e87-42b0-aab1-6980cacb828e" containerName="collect-profiles" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.438984 4663 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cb2e2dd-8e87-42b0-aab1-6980cacb828e" containerName="collect-profiles" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.439453 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.443879 4663 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-2xdq7"/"default-dockercfg-9plfz" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.443885 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2xdq7"/"openshift-service-ca.crt" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.444105 4663 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-2xdq7"/"kube-root-ca.crt" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.447548 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2xdq7/must-gather-8hwvk"] Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.631818 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.631871 4663 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbrgx\" (UniqueName: \"kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.732624 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.732674 4663 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbrgx\" (UniqueName: \"kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc 
kubenswrapper[4663]: I1212 14:31:05.733004 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.747147 4663 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbrgx\" (UniqueName: \"kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx\") pod \"must-gather-8hwvk\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:05 crc kubenswrapper[4663]: I1212 14:31:05.751860 4663 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:31:06 crc kubenswrapper[4663]: I1212 14:31:06.078629 4663 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-2xdq7/must-gather-8hwvk"] Dec 12 14:31:06 crc kubenswrapper[4663]: I1212 14:31:06.185371 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" event={"ID":"10fcf0bf-73f7-461a-a993-c6fc36413438","Type":"ContainerStarted","Data":"a78c919bfbf0e6b3b9c19d641b4d0048ebd10977d5e9df5b060bb419efaf3a05"} Dec 12 14:31:10 crc kubenswrapper[4663]: E1212 14:31:10.874067 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:31:13 crc kubenswrapper[4663]: I1212 14:31:13.217566 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" event={"ID":"10fcf0bf-73f7-461a-a993-c6fc36413438","Type":"ContainerStarted","Data":"9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3"} Dec 12 14:31:13 crc kubenswrapper[4663]: I1212 14:31:13.218461 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" event={"ID":"10fcf0bf-73f7-461a-a993-c6fc36413438","Type":"ContainerStarted","Data":"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7"} Dec 12 14:31:14 crc kubenswrapper[4663]: I1212 14:31:14.870409 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:31:14 crc kubenswrapper[4663]: E1212 14:31:14.870773 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:31:28 crc kubenswrapper[4663]: I1212 14:31:28.870340 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:31:28 crc kubenswrapper[4663]: E1212 14:31:28.880112 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:31:35 crc kubenswrapper[4663]: I1212 14:31:35.836102 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-zcz24_fdcb9400-6fea-46b5-ac30-975a690d67c5/control-plane-machine-set-operator/0.log" Dec 12 14:31:35 crc kubenswrapper[4663]: I1212 14:31:35.916898 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lq2p4_317dd582-032f-45be-ab3d-30f199ac2261/kube-rbac-proxy/0.log" Dec 12 14:31:35 crc kubenswrapper[4663]: I1212 14:31:35.944124 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-lq2p4_317dd582-032f-45be-ab3d-30f199ac2261/machine-api-operator/0.log" Dec 12 14:31:39 crc kubenswrapper[4663]: I1212 14:31:39.869436 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:31:39 crc kubenswrapper[4663]: E1212 14:31:39.869792 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:31:43 crc kubenswrapper[4663]: I1212 14:31:43.021517 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-tw7gw_06657a47-365d-4912-9444-21c7ead0ee4a/cert-manager-controller/0.log" Dec 12 14:31:43 crc kubenswrapper[4663]: I1212 14:31:43.118874 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-h4msx_ba4281a0-b789-4914-8ad6-aeea0b349c7b/cert-manager-cainjector/0.log" Dec 12 14:31:43 crc kubenswrapper[4663]: I1212 14:31:43.133712 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-lgmg9_8faa1738-9a0b-4e74-99ad-c7ea18277b98/cert-manager-webhook/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.179412 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-wp687_b62c8bbe-eb6b-4f0a-ade2-813a6a149c36/nmstate-console-plugin/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.283630 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-pg97r_3e5fad3b-f0aa-4dd2-a485-d44a365605f1/nmstate-handler/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.348184 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-v5c4t_e0838bcf-5ff7-49cc-a108-ac6c9f98b92c/nmstate-metrics/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.354115 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-v5c4t_e0838bcf-5ff7-49cc-a108-ac6c9f98b92c/kube-rbac-proxy/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.449739 4663 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-rwbmc_890e13cb-bf05-4143-9dba-9979bbfbe8e8/nmstate-operator/0.log" Dec 12 14:31:50 crc kubenswrapper[4663]: I1212 14:31:50.468231 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-dw2nz_19e4fc6a-b15f-4c91-9caa-08e836723adc/nmstate-webhook/0.log" Dec 12 14:31:51 crc kubenswrapper[4663]: I1212 14:31:51.870299 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:31:51 crc kubenswrapper[4663]: E1212 14:31:51.870509 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.209658 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-d267t_ba53dfba-45e3-4ac2-93ca-67772a7acdef/kube-rbac-proxy/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.302266 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-d267t_ba53dfba-45e3-4ac2-93ca-67772a7acdef/controller/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.331895 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-frr-files/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.459211 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-reloader/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.461640 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-frr-files/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.468604 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-reloader/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.473123 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-metrics/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.570385 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-frr-files/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.584860 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-reloader/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.589658 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-metrics/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.609838 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-metrics/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.704841 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-reloader/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.720988 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-metrics/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.732550 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/cp-frr-files/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.735141 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/controller/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.845865 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/kube-rbac-proxy/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.853486 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/frr-metrics/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.871159 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/frr/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.876875 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/kube-rbac-proxy-frr/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.959108 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-m5rz7_1d2f5585-abc1-4104-8d45-9e3ff2976653/reloader/0.log" Dec 12 14:31:59 crc kubenswrapper[4663]: I1212 14:31:59.982158 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-twfrw_ed966777-b283-45df-b619-93ff7eac79d2/frr-k8s-webhook-server/0.log" Dec 12 14:32:00 crc kubenswrapper[4663]: I1212 14:32:00.089679 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-97b7cbd45-gpcln_1f208d44-68e4-40c1-803e-751fe8609d8a/manager/0.log" Dec 12 14:32:00 crc kubenswrapper[4663]: I1212 14:32:00.096387 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5bdcb8754-8xmhq_6ee18c00-7a8c-4652-8bc8-ef4108f8d534/webhook-server/0.log" Dec 12 14:32:00 crc kubenswrapper[4663]: I1212 14:32:00.192995 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-ktlsq_95cf0800-d909-4e76-87c9-c41e4df21a56/kube-rbac-proxy/0.log" Dec 12 14:32:00 crc kubenswrapper[4663]: I1212 14:32:00.286780 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-ktlsq_95cf0800-d909-4e76-87c9-c41e4df21a56/speaker/0.log" Dec 12 14:32:02 crc kubenswrapper[4663]: I1212 14:32:02.869887 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:32:02 crc kubenswrapper[4663]: E1212 14:32:02.870132 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.517980 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/util/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.612274 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/util/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.629769 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/pull/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.630659 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/pull/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.740215 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/util/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.740957 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/extract/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.741019 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4g4v4j_19b5b820-9d1f-4a41-ac3a-b6609a4be881/pull/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.848688 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/util/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.970909 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/pull/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.974546 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/util/0.log" Dec 12 14:32:07 crc kubenswrapper[4663]: I1212 14:32:07.985456 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/pull/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.102517 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/util/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.104924 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/extract/0.log" Dec 12 14:32:08 crc 
kubenswrapper[4663]: I1212 14:32:08.107497 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa86mbst_2ff84d75-5a7c-4a71-8e6b-fcc02d38b392/pull/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.197651 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-utilities/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.315810 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.319101 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-utilities/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.337773 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.455509 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-utilities/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.461686 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.587877 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-utilities/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.689024 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2rkr4_2d853a53-6f16-4db7-be39-c939984126e6/registry-server/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.719160 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-utilities/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.737939 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.759512 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.875110 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-content/0.log" Dec 12 14:32:08 crc kubenswrapper[4663]: I1212 14:32:08.875613 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.011189 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-d9jff_675a4c10-36f1-4fd9-97be-bfb87fe63f9a/marketplace-operator/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.120739 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.128322 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gf9rl_66941324-ab16-4783-a1db-140f14c7f4d2/registry-server/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.210497 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.213735 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.231722 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.357532 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.363761 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.419242 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6kscx_eecf200a-7ab0-4b4e-b914-ec2d18d44505/registry-server/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.499190 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.613153 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.625120 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.628797 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.730523 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-utilities/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.734651 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/extract-content/0.log" Dec 12 14:32:09 crc kubenswrapper[4663]: I1212 14:32:09.929802 4663 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-4vt5n_2ded57f0-621c-4daa-b14c-4c3417835f5a/registry-server/0.log" Dec 12 14:32:14 crc kubenswrapper[4663]: I1212 14:32:14.869798 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:32:14 crc kubenswrapper[4663]: E1212 14:32:14.870153 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:32:27 crc kubenswrapper[4663]: I1212 14:32:27.869506 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:32:27 crc kubenswrapper[4663]: E1212 14:32:27.870018 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:32:40 crc kubenswrapper[4663]: I1212 14:32:40.872515 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:32:40 crc kubenswrapper[4663]: E1212 14:32:40.873049 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:32:53 crc kubenswrapper[4663]: I1212 14:32:53.614492 4663 generic.go:334] "Generic (PLEG): container finished" podID="10fcf0bf-73f7-461a-a993-c6fc36413438" containerID="5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7" exitCode=0 Dec 12 14:32:53 crc kubenswrapper[4663]: I1212 14:32:53.614570 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" event={"ID":"10fcf0bf-73f7-461a-a993-c6fc36413438","Type":"ContainerDied","Data":"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7"} Dec 12 14:32:53 crc kubenswrapper[4663]: I1212 14:32:53.615030 4663 scope.go:117] "RemoveContainer" containerID="5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7" Dec 12 14:32:53 crc kubenswrapper[4663]: I1212 14:32:53.869620 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:32:53 crc kubenswrapper[4663]: E1212 14:32:53.869819 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kgwm2_openshift-machine-config-operator(74a58ec6-61d4-48ec-ad74-739c778f89b5)\"" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" 
podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" Dec 12 14:32:54 crc kubenswrapper[4663]: I1212 14:32:54.574785 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2xdq7_must-gather-8hwvk_10fcf0bf-73f7-461a-a993-c6fc36413438/gather/0.log" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.039061 4663 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-2xdq7/must-gather-8hwvk"] Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.039579 4663 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" podUID="10fcf0bf-73f7-461a-a993-c6fc36413438" containerName="copy" containerID="cri-o://9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3" gracePeriod=2 Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.042090 4663 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-2xdq7/must-gather-8hwvk"] Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.323703 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2xdq7_must-gather-8hwvk_10fcf0bf-73f7-461a-a993-c6fc36413438/copy/0.log" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.324228 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.363805 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbrgx\" (UniqueName: \"kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx\") pod \"10fcf0bf-73f7-461a-a993-c6fc36413438\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.363973 4663 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output\") pod \"10fcf0bf-73f7-461a-a993-c6fc36413438\" (UID: \"10fcf0bf-73f7-461a-a993-c6fc36413438\") " Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.367474 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx" (OuterVolumeSpecName: "kube-api-access-jbrgx") pod "10fcf0bf-73f7-461a-a993-c6fc36413438" (UID: "10fcf0bf-73f7-461a-a993-c6fc36413438"). InnerVolumeSpecName "kube-api-access-jbrgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.409532 4663 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "10fcf0bf-73f7-461a-a993-c6fc36413438" (UID: "10fcf0bf-73f7-461a-a993-c6fc36413438"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.465476 4663 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/10fcf0bf-73f7-461a-a993-c6fc36413438-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.465508 4663 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbrgx\" (UniqueName: \"kubernetes.io/projected/10fcf0bf-73f7-461a-a993-c6fc36413438-kube-api-access-jbrgx\") on node \"crc\" DevicePath \"\"" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.647806 4663 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-2xdq7_must-gather-8hwvk_10fcf0bf-73f7-461a-a993-c6fc36413438/copy/0.log" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.648082 4663 generic.go:334] "Generic (PLEG): container finished" podID="10fcf0bf-73f7-461a-a993-c6fc36413438" containerID="9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3" exitCode=143 Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.648124 4663 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-2xdq7/must-gather-8hwvk" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.648133 4663 scope.go:117] "RemoveContainer" containerID="9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.659291 4663 scope.go:117] "RemoveContainer" containerID="5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.678632 4663 scope.go:117] "RemoveContainer" containerID="9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3" Dec 12 14:33:01 crc kubenswrapper[4663]: E1212 14:33:01.679049 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3\": container with ID starting with 9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3 not found: ID does not exist" containerID="9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.679074 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3"} err="failed to get container status \"9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3\": rpc error: code = NotFound desc = could not find container \"9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3\": container with ID starting with 9ba18d559903ae8aa803992e4b0410cc379716970ba43b3eefcb05b8892dd8d3 not found: ID does not exist" Dec 12 14:33:01 crc kubenswrapper[4663]: I1212 14:33:01.679094 4663 scope.go:117] "RemoveContainer" containerID="5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7" Dec 12 14:33:01 crc kubenswrapper[4663]: E1212 14:33:01.679409 4663 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7\": container with ID starting with 5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7 not found: ID does not exist" containerID="5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7" Dec 12 14:33:01 crc 
kubenswrapper[4663]: I1212 14:33:01.679450 4663 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7"} err="failed to get container status \"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7\": rpc error: code = NotFound desc = could not find container \"5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7\": container with ID starting with 5fa865899131c9e2aab7f60a59eeed529ad8703edea953872121bded0e1412f7 not found: ID does not exist" Dec 12 14:33:02 crc kubenswrapper[4663]: I1212 14:33:02.877592 4663 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10fcf0bf-73f7-461a-a993-c6fc36413438" path="/var/lib/kubelet/pods/10fcf0bf-73f7-461a-a993-c6fc36413438/volumes" Dec 12 14:33:08 crc kubenswrapper[4663]: I1212 14:33:08.873096 4663 scope.go:117] "RemoveContainer" containerID="1080306dd908d1d7c1cee38114f238d33ad5839a43bd85dc03fdb32754da88d7" Dec 12 14:33:09 crc kubenswrapper[4663]: I1212 14:33:09.681779 4663 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" event={"ID":"74a58ec6-61d4-48ec-ad74-739c778f89b5","Type":"ContainerStarted","Data":"6a58b2b7011d0441d231a4c8ba5e74a94cc4f562064f3edb134b7cee1b6a3406"} Dec 12 14:33:25 crc kubenswrapper[4663]: E1212 14:33:25.875516 4663 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:33:25 crc kubenswrapper[4663]: E1212 14:33:25.875831 4663 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" image="38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea" Dec 12 14:33:25 crc kubenswrapper[4663]: E1212 14:33:25.875930 4663 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vc6l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe 
-addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-operator-index-7p228_openstack-operators(d196ad18-5271-4392-a785-c4231f5d1cea): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \"http://38.102.83.12:5001/v2/\": dial tcp 38.102.83.12:5001: i/o timeout" logger="UnhandledError" Dec 12 14:33:25 crc kubenswrapper[4663]: E1212 14:33:25.877104 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea: pinging container registry 38.102.83.12:5001: Get \\\"http://38.102.83.12:5001/v2/\\\": dial tcp 38.102.83.12:5001: i/o timeout\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:33:40 crc kubenswrapper[4663]: E1212 14:33:40.874294 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:33:55 crc kubenswrapper[4663]: E1212 14:33:55.871228 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:34:07 crc kubenswrapper[4663]: E1212 14:34:07.871130 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image 
\\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:34:20 crc kubenswrapper[4663]: E1212 14:34:20.874203 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:34:35 crc kubenswrapper[4663]: E1212 14:34:35.871393 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:34:50 crc kubenswrapper[4663]: E1212 14:34:50.874932 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:35:04 crc kubenswrapper[4663]: E1212 14:35:04.872150 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:35:19 crc kubenswrapper[4663]: E1212 14:35:19.872519 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:35:34 crc kubenswrapper[4663]: E1212 14:35:34.871900 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:35:36 crc kubenswrapper[4663]: I1212 14:35:36.129087 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:35:36 crc kubenswrapper[4663]: I1212 14:35:36.129167 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:35:46 crc kubenswrapper[4663]: E1212 14:35:46.871626 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:36:00 crc kubenswrapper[4663]: E1212 14:36:00.873730 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:36:06 crc kubenswrapper[4663]: I1212 14:36:06.129069 4663 patch_prober.go:28] interesting pod/machine-config-daemon-kgwm2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 12 14:36:06 crc kubenswrapper[4663]: I1212 14:36:06.129316 4663 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kgwm2" podUID="74a58ec6-61d4-48ec-ad74-739c778f89b5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 12 14:36:14 crc kubenswrapper[4663]: E1212 14:36:14.872104 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" Dec 12 14:36:25 crc kubenswrapper[4663]: E1212 14:36:25.871055 4663 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.12:5001/openstack-k8s-operators/openstack-operator-index:9a923a3e438c4f66834894bfa59207197cf3daea\\\"\"" pod="openstack-operators/openstack-operator-index-7p228" podUID="d196ad18-5271-4392-a785-c4231f5d1cea" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515117024163024445 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015117024163017362 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015117020047016502 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015117020047015452 5ustar corecore